diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md deleted file mode 100644 index 00a3c52da..000000000 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ /dev/null @@ -1,37 +0,0 @@ ---- -name: Bug report -about: Create a report to help us improve -title: '[BUG]' -labels: '' -assignees: '' ---- - -**Describe the bug** -A clear and concise description of what the bug is. - -**To Reproduce** -Steps to reproduce the behavior: - -1. Go to '...' -2. Click on '....' -3. Scroll down to '....' -4. See error - -**Expected behavior** -A clear and concise description of what you expected to happen. - -**Screenshots** -If applicable, add screenshots to help explain your problem. - -**Flow** -If applicable, add exported flow in order to help replicating the problem. - -**Setup** - -- Installation [e.g. docker, `npx flowise start`, `pnpm start`] -- Flowise Version [e.g. 1.2.11] -- OS: [e.g. macOS, Windows, Linux] -- Browser [e.g. chrome, safari] - -**Additional context** -Add any other context about the problem here. diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml new file mode 100644 index 000000000..40e06c9c1 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.yml @@ -0,0 +1,101 @@ +name: Bug Report +description: File a bug report to help us improve +labels: ['bug'] +assignees: [] +body: + - type: markdown + attributes: + value: | + Make sure to have a proper title and description. + + - type: textarea + id: bug-description + attributes: + label: Describe the bug + description: A clear and concise description of what the bug is. + placeholder: Tell us what you see! + validations: + required: true + + - type: textarea + id: reproduce + attributes: + label: To Reproduce + description: Steps to reproduce the behavior + placeholder: | + 1. Go to '...' + 2. Click on '....' + 3. Scroll down to '....' + 4. 
See error + validations: + required: true + + - type: textarea + id: expected + attributes: + label: Expected behavior + description: A clear and concise description of what you expected to happen. + validations: + required: true + + - type: textarea + id: screenshots + attributes: + label: Screenshots + description: If applicable, add screenshots to help explain your problem. + placeholder: Drag and drop or paste screenshots here + + - type: textarea + id: flow + attributes: + label: Flow + description: If applicable, add exported flow in order to help replicating the problem. + placeholder: Paste your exported flow here + + - type: dropdown + id: method + attributes: + label: Use Method + description: How did you use Flowise? + options: + - Flowise Cloud + - Docker + - npx flowise start + - pnpm start + + - type: input + id: version + attributes: + label: Flowise Version + description: What version of Flowise are you running? + placeholder: e.g., 1.2.11 + + - type: dropdown + id: os + attributes: + label: Operating System + description: What operating system are you using? + options: + - Windows + - macOS + - Linux + - Other + + - type: dropdown + id: browser + attributes: + label: Browser + description: What browser are you using? + options: + - Chrome + - Firefox + - Safari + - Edge + - Other + + - type: textarea + id: context + attributes: + label: Additional context + description: Add any other context about the problem here. + placeholder: Any additional information that might be helpful diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md deleted file mode 100644 index 557a358e9..000000000 --- a/.github/ISSUE_TEMPLATE/feature_request.md +++ /dev/null @@ -1,13 +0,0 @@ ---- -name: Feature request -about: Suggest an idea for this project -title: '[FEATURE]' -labels: '' -assignees: '' ---- - -**Describe the feature you'd like** -A clear and concise description of what you would like Flowise to have. 
- -**Additional context** -Add any other context or screenshots about the feature request here. diff --git a/.github/ISSUE_TEMPLATE/feature_request.yml b/.github/ISSUE_TEMPLATE/feature_request.yml new file mode 100644 index 000000000..72deb192a --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.yml @@ -0,0 +1,67 @@ +name: Feature Request +description: Suggest a new feature or enhancement for Flowise +labels: ['enhancement'] +assignees: [] +body: + - type: markdown + attributes: + value: | + Thanks for suggesting a new feature! Please provide as much detail as possible to help us understand your request. + + - type: textarea + id: feature-description + attributes: + label: Feature Description + description: A clear and concise description of the feature you'd like to see in Flowise. + placeholder: Describe what you want to be added or improved... + validations: + required: true + + - type: dropdown + id: feature-category + attributes: + label: Feature Category + description: What category does this feature belong to? + options: + - UI/UX Improvement + - New Node/Component + - Integration + - Performance + - Security + - Documentation + - API Enhancement + - Workflow/Flow Management + - Authentication/Authorization + - Database/Storage + - Deployment/DevOps + - Other + validations: + required: true + + - type: textarea + id: problem-statement + attributes: + label: Problem Statement + description: What problem does this feature solve? What's the current pain point? + placeholder: Describe the problem or limitation you're facing... + + - type: textarea + id: proposed-solution + attributes: + label: Proposed Solution + description: How would you like this feature to work? Be as specific as possible. + placeholder: Describe your ideal solution in detail... + + - type: textarea + id: mockups-references + attributes: + label: Mockups or References + description: Any mockups, screenshots, or references to similar features in other tools? 
+ placeholder: Upload images or provide links to examples... + + - type: textarea + id: additional-context + attributes: + label: Additional Context + description: Any other information, context, or examples that would help us understand this request. + placeholder: Add any other relevant information... diff --git a/.github/workflows/autoSyncMergedPullRequest.yml b/.github/workflows/autoSyncMergedPullRequest.yml deleted file mode 100644 index 0868c42e7..000000000 --- a/.github/workflows/autoSyncMergedPullRequest.yml +++ /dev/null @@ -1,33 +0,0 @@ -name: autoSyncMergedPullRequest -on: - pull_request_target: - types: - - closed - branches: ['main'] -jobs: - autoSyncMergedPullRequest: - if: github.event.pull_request.merged == true - runs-on: ubuntu-latest - permissions: - contents: write - steps: - - uses: actions/checkout@v4 - - name: Show PR info - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: | - echo The PR #${{ github.event.pull_request.number }} was merged on main branch! - - name: Repository Dispatch - uses: peter-evans/repository-dispatch@v3 - with: - token: ${{ secrets.AUTOSYNC_TOKEN }} - repository: ${{ secrets.AUTOSYNC_CH_URL }} - event-type: ${{ secrets.AUTOSYNC_PR_EVENT_TYPE }} - client-payload: >- - { - "ref": "${{ github.ref }}", - "prNumber": "${{ github.event.pull_request.number }}", - "prTitle": "${{ github.event.pull_request.title }}", - "prDescription": "", - "sha": "${{ github.sha }}" - } diff --git a/.github/workflows/autoSyncSingleCommit.yml b/.github/workflows/autoSyncSingleCommit.yml deleted file mode 100644 index 6a661c946..000000000 --- a/.github/workflows/autoSyncSingleCommit.yml +++ /dev/null @@ -1,36 +0,0 @@ -name: autoSyncSingleCommit -on: - push: - branches: - - main -jobs: - doNotAutoSyncSingleCommit: - if: github.event.commits[1] != null - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - name: IGNORE autoSyncSingleCommit - run: | - echo This single commit has came from a merged commit. We will ignore it. 
This case is handled in autoSyncMergedPullRequest workflow for merge commits comming from merged pull requests only! Beware, the regular merge commits are not handled by any workflow for the moment. - autoSyncSingleCommit: - if: github.event.commits[1] == null - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - name: autoSyncSingleCommit - env: - GITHUB_CONTEXT: ${{ toJSON(github) }} - run: | - echo Autosync a single commit with id: ${{ github.sha }} from openSource main branch towards cloud hosted version. - - name: Repository Dispatch - uses: peter-evans/repository-dispatch@v3 - with: - token: ${{ secrets.AUTOSYNC_TOKEN }} - repository: ${{ secrets.AUTOSYNC_CH_URL }} - event-type: ${{ secrets.AUTOSYNC_SC_EVENT_TYPE }} - client-payload: >- - { - "ref": "${{ github.ref }}", - "sha": "${{ github.sha }}", - "commitMessage": "${{ github.event.commits[0].id }}" - } diff --git a/.github/workflows/docker-image-dockerhub.yml b/.github/workflows/docker-image-dockerhub.yml new file mode 100644 index 000000000..3752ddc7e --- /dev/null +++ b/.github/workflows/docker-image-dockerhub.yml @@ -0,0 +1,72 @@ +name: Docker Image CI - Docker Hub + +on: + workflow_dispatch: + inputs: + node_version: + description: 'Node.js version to build this image with.' + type: choice + required: true + default: '20' + options: + - '20' + tag_version: + description: 'Tag version of the image to be pushed.' 
+ type: string + required: true + default: 'latest' + +jobs: + docker: + runs-on: ubuntu-latest + steps: + - name: Set default values + id: defaults + run: | + echo "node_version=${{ github.event.inputs.node_version || '20' }}" >> $GITHUB_OUTPUT + echo "tag_version=${{ github.event.inputs.tag_version || 'latest' }}" >> $GITHUB_OUTPUT + + - name: Checkout + uses: actions/checkout@v4.1.1 + + - name: Set up QEMU + uses: docker/setup-qemu-action@v3.0.0 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3.0.0 + + - name: Login to Docker Hub + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + + # ------------------------- + # Build and push main image + # ------------------------- + - name: Build and push main image + uses: docker/build-push-action@v5.3.0 + with: + context: . + file: ./docker/Dockerfile + build-args: | + NODE_VERSION=${{ steps.defaults.outputs.node_version }} + platforms: linux/amd64,linux/arm64 + push: true + tags: | + flowiseai/flowise:${{ steps.defaults.outputs.tag_version }} + + # ------------------------- + # Build and push worker image + # ------------------------- + - name: Build and push worker image + uses: docker/build-push-action@v5.3.0 + with: + context: . + file: docker/worker/Dockerfile + build-args: | + NODE_VERSION=${{ steps.defaults.outputs.node_version }} + platforms: linux/amd64,linux/arm64 + push: true + tags: | + flowiseai/flowise-worker:${{ steps.defaults.outputs.tag_version }} diff --git a/.github/workflows/docker-image-ecr.yml b/.github/workflows/docker-image-ecr.yml new file mode 100644 index 000000000..1fc28fb1d --- /dev/null +++ b/.github/workflows/docker-image-ecr.yml @@ -0,0 +1,73 @@ +name: Docker Image CI - AWS ECR + +on: + workflow_dispatch: + inputs: + environment: + description: 'Environment to push the image to.' 
+ required: true + default: 'dev' + type: choice + options: + - dev + - prod + node_version: + description: 'Node.js version to build this image with.' + type: choice + required: true + default: '20' + options: + - '20' + tag_version: + description: 'Tag version of the image to be pushed.' + type: string + required: true + default: 'latest' + +jobs: + docker: + runs-on: ubuntu-latest + environment: ${{ github.event.inputs.environment }} + steps: + - name: Set default values + id: defaults + run: | + echo "node_version=${{ github.event.inputs.node_version || '20' }}" >> $GITHUB_OUTPUT + echo "tag_version=${{ github.event.inputs.tag_version || 'latest' }}" >> $GITHUB_OUTPUT + + - name: Checkout + uses: actions/checkout@v4.1.1 + + - name: Set up QEMU + uses: docker/setup-qemu-action@v3.0.0 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3.0.0 + + - name: Configure AWS Credentials + uses: aws-actions/configure-aws-credentials@v3 + with: + aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + aws-region: ${{ secrets.AWS_REGION }} + + - name: Login to Amazon ECR + uses: aws-actions/amazon-ecr-login@v1 + + # ------------------------- + # Build and push main image + # ------------------------- + - name: Build and push main image + uses: docker/build-push-action@v5.3.0 + with: + context: . 
+ file: Dockerfile + build-args: | + NODE_VERSION=${{ steps.defaults.outputs.node_version }} + platforms: linux/amd64,linux/arm64 + push: true + tags: | + ${{ format('{0}.dkr.ecr.{1}.amazonaws.com/flowise:{2}', + secrets.AWS_ACCOUNT_ID, + secrets.AWS_REGION, + steps.defaults.outputs.tag_version) }} diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml deleted file mode 100644 index 5c58608e1..000000000 --- a/.github/workflows/docker-image.yml +++ /dev/null @@ -1,43 +0,0 @@ -name: Docker Image CI - -on: - workflow_dispatch: - inputs: - node_version: - description: 'Node.js version to build this image with.' - type: choice - required: true - default: '20' - options: - - '20' - tag_version: - description: 'Tag version of the image to be pushed.' - type: string - required: true - default: 'latest' - -jobs: - docker: - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: actions/checkout@v4.1.1 - - name: Set up QEMU - uses: docker/setup-qemu-action@v3.0.0 - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3.0.0 - - name: Login to Docker Hub - uses: docker/login-action@v3 - with: - username: ${{ secrets.DOCKERHUB_USERNAME }} - password: ${{ secrets.DOCKERHUB_TOKEN }} - - name: Build and push - uses: docker/build-push-action@v5.3.0 - with: - context: . 
- file: ./docker/Dockerfile - build-args: | - NODE_VERSION=${{github.event.inputs.node_version}} - platforms: linux/amd64,linux/arm64 - push: true - tags: flowiseai/flowise:${{github.event.inputs.tag_version}} diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index f8d6fa6f9..1b7139d39 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -6,6 +6,7 @@ on: pull_request: branches: - '*' + workflow_dispatch: permissions: contents: read jobs: @@ -31,6 +32,8 @@ jobs: - run: pnpm install - run: pnpm lint - run: pnpm build + env: + NODE_OPTIONS: '--max_old_space_size=4096' - name: Cypress install run: pnpm cypress install - name: Install dependencies (Cypress Action) diff --git a/.github/workflows/test_docker_build.yml b/.github/workflows/test_docker_build.yml index a27cf22dd..28b970cf8 100644 --- a/.github/workflows/test_docker_build.yml +++ b/.github/workflows/test_docker_build.yml @@ -8,13 +8,12 @@ on: pull_request: branches: - '*' - + workflow_dispatch: jobs: build: runs-on: ubuntu-latest env: PUPPETEER_SKIP_DOWNLOAD: true steps: - - uses: actions/checkout@v3 - + - uses: actions/checkout@v4 - run: docker build --no-cache -t flowise . diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 60735ef18..c939d1a22 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -114,54 +114,52 @@ Flowise has 3 different modules in a single mono repository. to make sure everything works fine in production. -11. Commit code and submit Pull Request from forked branch pointing to [Flowise master](https://github.com/FlowiseAI/Flowise/tree/master). +11. Commit code and submit Pull Request from forked branch pointing to [Flowise main](https://github.com/FlowiseAI/Flowise/tree/main). ## ๐ŸŒฑ Env Variables Flowise support different environment variables to configure your instance. You can specify the following variables in the `.env` file inside `packages/server` folder. 
Read [more](https://docs.flowiseai.com/environment-variables) -| Variable | Description | Type | Default | -| ---------------------------------- | -------------------------------------------------------------------------------- | ------------------------------------------------ | ----------------------------------- | -| PORT | The HTTP port Flowise runs on | Number | 3000 | -| CORS_ORIGINS | The allowed origins for all cross-origin HTTP calls | String | | -| IFRAME_ORIGINS | The allowed origins for iframe src embedding | String | | -| FLOWISE_USERNAME | Username to login | String | | -| FLOWISE_PASSWORD | Password to login | String | | -| FLOWISE_FILE_SIZE_LIMIT | Upload File Size Limit | String | 50mb | -| DEBUG | Print logs from components | Boolean | | -| LOG_PATH | Location where log files are stored | String | `your-path/Flowise/logs` | -| LOG_LEVEL | Different levels of logs | Enum String: `error`, `info`, `verbose`, `debug` | `info` | -| LOG_JSON_SPACES | Spaces to beautify JSON logs | | 2 | -| APIKEY_STORAGE_TYPE | To store api keys on a JSON file or database. 
Default is `json` | Enum String: `json`, `db` | `json` | -| APIKEY_PATH | Location where api keys are saved when `APIKEY_STORAGE_TYPE` is `json` | String | `your-path/Flowise/packages/server` | -| TOOL_FUNCTION_BUILTIN_DEP | NodeJS built-in modules to be used for Tool Function | String | | -| TOOL_FUNCTION_EXTERNAL_DEP | External modules to be used for Tool Function | String | | -| DATABASE_TYPE | Type of database to store the flowise data | Enum String: `sqlite`, `mysql`, `postgres` | `sqlite` | -| DATABASE_PATH | Location where database is saved (When DATABASE_TYPE is sqlite) | String | `your-home-dir/.flowise` | -| DATABASE_HOST | Host URL or IP address (When DATABASE_TYPE is not sqlite) | String | | -| DATABASE_PORT | Database port (When DATABASE_TYPE is not sqlite) | String | | -| DATABASE_USER | Database username (When DATABASE_TYPE is not sqlite) | String | | -| DATABASE_PASSWORD | Database password (When DATABASE_TYPE is not sqlite) | String | | -| DATABASE_NAME | Database name (When DATABASE_TYPE is not sqlite) | String | | -| DATABASE_SSL_KEY_BASE64 | Database SSL client cert in base64 (takes priority over DATABASE_SSL) | Boolean | false | -| DATABASE_SSL | Database connection overssl (When DATABASE_TYPE is postgre) | Boolean | false | -| SECRETKEY_PATH | Location where encryption key (used to encrypt/decrypt credentials) is saved | String | `your-path/Flowise/packages/server` | -| FLOWISE_SECRETKEY_OVERWRITE | Encryption key to be used instead of the key stored in SECRETKEY_PATH | String | | -| MODEL_LIST_CONFIG_JSON | File path to load list of models from your local config file | String | `/your_model_list_config_file_path` | -| STORAGE_TYPE | Type of storage for uploaded files. 
default is `local` | Enum String: `s3`, `local`, `gcs` | `local` | -| BLOB_STORAGE_PATH | Local folder path where uploaded files are stored when `STORAGE_TYPE` is `local` | String | `your-home-dir/.flowise/storage` | -| S3_STORAGE_BUCKET_NAME | Bucket name to hold the uploaded files when `STORAGE_TYPE` is `s3` | String | | -| S3_STORAGE_ACCESS_KEY_ID | AWS Access Key | String | | -| S3_STORAGE_SECRET_ACCESS_KEY | AWS Secret Key | String | | -| S3_STORAGE_REGION | Region for S3 bucket | String | | -| S3_ENDPOINT_URL | Custom Endpoint for S3 | String | | -| S3_FORCE_PATH_STYLE | Set this to true to force the request to use path-style addressing | Boolean | false | -| GOOGLE_CLOUD_STORAGE_PROJ_ID | The GCP project id for cloud storage & logging when `STORAGE_TYPE` is `gcs` | String | | -| GOOGLE_CLOUD_STORAGE_CREDENTIAL | The credential key file path when `STORAGE_TYPE` is `gcs` | String | | -| GOOGLE_CLOUD_STORAGE_BUCKET_NAME | Bucket name to hold the uploaded files when `STORAGE_TYPE` is `gcs` | String | | -| GOOGLE_CLOUD_UNIFORM_BUCKET_ACCESS | Enable uniform bucket level access when `STORAGE_TYPE` is `gcs` | Boolean | true | -| SHOW_COMMUNITY_NODES | Show nodes created by community | Boolean | | -| DISABLED_NODES | Hide nodes from UI (comma separated list of node names) | String | | +| Variable | Description | Type | Default | +| ---------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------ | ----------------------------------- | +| PORT | The HTTP port Flowise runs on | Number | 3000 | +| CORS_ORIGINS | The allowed origins for all cross-origin HTTP calls | String | | +| IFRAME_ORIGINS | The allowed origins for iframe src embedding | String | | +| 
FLOWISE_FILE_SIZE_LIMIT | Upload File Size Limit | String | 50mb | +| DEBUG | Print logs from components | Boolean | | +| LOG_PATH | Location where log files are stored | String | `your-path/Flowise/logs` | +| LOG_LEVEL | Different levels of logs | Enum String: `error`, `info`, `verbose`, `debug` | `info` | +| LOG_JSON_SPACES | Spaces to beautify JSON logs | | 2 | +| TOOL_FUNCTION_BUILTIN_DEP | NodeJS built-in modules to be used for Custom Tool or Function | String | | +| TOOL_FUNCTION_EXTERNAL_DEP | External modules to be used for Custom Tool or Function | String | | +| ALLOW_BUILTIN_DEP | Allow project dependencies to be used for Custom Tool or Function | Boolean | false | +| DATABASE_TYPE | Type of database to store the flowise data | Enum String: `sqlite`, `mysql`, `postgres` | `sqlite` | +| DATABASE_PATH | Location where database is saved (When DATABASE_TYPE is sqlite) | String | `your-home-dir/.flowise` | +| DATABASE_HOST | Host URL or IP address (When DATABASE_TYPE is not sqlite) | String | | +| DATABASE_PORT | Database port (When DATABASE_TYPE is not sqlite) | String | | +| DATABASE_USER | Database username (When DATABASE_TYPE is not sqlite) | String | | +| DATABASE_PASSWORD | Database password (When DATABASE_TYPE is not sqlite) | String | | +| DATABASE_NAME | Database name (When DATABASE_TYPE is not sqlite) | String | | +| DATABASE_SSL_KEY_BASE64 | Database SSL client cert in base64 (takes priority over DATABASE_SSL) | Boolean | false | +| DATABASE_SSL | Database connection overssl (When DATABASE_TYPE is postgre) | Boolean | false | +| SECRETKEY_PATH | Location where encryption key (used to encrypt/decrypt credentials) is saved | String | `your-path/Flowise/packages/server` | +| FLOWISE_SECRETKEY_OVERWRITE | Encryption key to be used instead of the key stored in SECRETKEY_PATH | String | | +| MODEL_LIST_CONFIG_JSON | File path to load list of models from your local config file | String | `/your_model_list_config_file_path` | +| STORAGE_TYPE | Type of 
storage for uploaded files. default is `local` | Enum String: `s3`, `local`, `gcs` | `local` | +| BLOB_STORAGE_PATH | Local folder path where uploaded files are stored when `STORAGE_TYPE` is `local` | String | `your-home-dir/.flowise/storage` | +| S3_STORAGE_BUCKET_NAME | Bucket name to hold the uploaded files when `STORAGE_TYPE` is `s3` | String | | +| S3_STORAGE_ACCESS_KEY_ID | AWS Access Key | String | | +| S3_STORAGE_SECRET_ACCESS_KEY | AWS Secret Key | String | | +| S3_STORAGE_REGION | Region for S3 bucket | String | | +| S3_ENDPOINT_URL | Custom Endpoint for S3 | String | | +| S3_FORCE_PATH_STYLE | Set this to true to force the request to use path-style addressing | Boolean | false | +| GOOGLE_CLOUD_STORAGE_PROJ_ID | The GCP project id for cloud storage & logging when `STORAGE_TYPE` is `gcs` | String | | +| GOOGLE_CLOUD_STORAGE_CREDENTIAL | The credential key file path when `STORAGE_TYPE` is `gcs` | String | | +| GOOGLE_CLOUD_STORAGE_BUCKET_NAME | Bucket name to hold the uploaded files when `STORAGE_TYPE` is `gcs` | String | | +| GOOGLE_CLOUD_UNIFORM_BUCKET_ACCESS | Enable uniform bucket level access when `STORAGE_TYPE` is `gcs` | Boolean | true | +| SHOW_COMMUNITY_NODES | Show nodes created by community | Boolean | | +| DISABLED_NODES | Hide nodes from UI (comma separated list of node names) | String | | +| TRUST_PROXY | Configure proxy trust settings for proper IP detection. Values: 'true' (trust all), 'false' (disable), number (hop count), or Express proxy values (e.g., 'loopback', 'linklocal', 'uniquelocal', IP addresses). [Learn More](https://expressjs.com/en/guide/behind-proxies.html) | Boolean/String/Number | true | You can also specify the env variables when using `npx`. 
For example: diff --git a/Dockerfile b/Dockerfile index a824b7f80..70041f41d 100644 --- a/Dockerfile +++ b/Dockerfile @@ -5,34 +5,41 @@ # docker run -d -p 3000:3000 flowise FROM node:20-alpine -RUN apk add --update libc6-compat python3 make g++ -# needed for pdfjs-dist -RUN apk add --no-cache build-base cairo-dev pango-dev -# Install Chromium -RUN apk add --no-cache chromium - -# Install curl for container-level health checks -# Fixes: https://github.com/FlowiseAI/Flowise/issues/4126 -RUN apk add --no-cache curl - -#install PNPM globaly -RUN npm install -g pnpm +# Install system dependencies and build tools +RUN apk update && \ + apk add --no-cache \ + libc6-compat \ + python3 \ + make \ + g++ \ + build-base \ + cairo-dev \ + pango-dev \ + chromium \ + curl && \ + npm install -g pnpm ENV PUPPETEER_SKIP_DOWNLOAD=true ENV PUPPETEER_EXECUTABLE_PATH=/usr/bin/chromium-browser ENV NODE_OPTIONS=--max-old-space-size=8192 -WORKDIR /usr/src +WORKDIR /usr/src/flowise # Copy app source COPY . . -RUN pnpm install +# Install dependencies and build +RUN pnpm install && \ + pnpm build -RUN pnpm build +# Give the node user ownership of the application files +RUN chown -R node:node . + +# Switch to non-root user (node user already exists in node:20-alpine) +USER node EXPOSE 3000 -CMD [ "pnpm", "start" ] +CMD [ "pnpm", "start" ] \ No newline at end of file diff --git a/LICENSE.md b/LICENSE.md index 808000018..68314426e 100644 --- a/LICENSE.md +++ b/LICENSE.md @@ -1,6 +1,14 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ +Copyright (c) 2023-present FlowiseAI, Inc. 
+ +Portions of this software are licensed as follows: + +- All content that resides under https://github.com/FlowiseAI/Flowise/tree/main/packages/server/src/enterprise directory and files with explicit copyright notice such as [IdentityManager.ts](https://github.com/FlowiseAI/Flowise/tree/main/packages/server/src/IdentityManager.ts) are licensed under [Commercial License](https://github.com/FlowiseAI/Flowise/tree/main/packages/server/src/enterprise/LICENSE.md). +- All third party components incorporated into the FlowiseAI Software are licensed under the original license provided by the owner of the applicable component. +- Content outside of the above mentioned directories or restrictions above is available under the "Apache 2.0" license as defined below. + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION diff --git a/README.md b/README.md index d1c9b2da4..79f4f147e 100644 --- a/README.md +++ b/README.md @@ -5,6 +5,8 @@

+
+ [![Release Notes](https://img.shields.io/github/release/FlowiseAI/Flowise)](https://github.com/FlowiseAI/Flowise/releases) [![Discord](https://img.shields.io/discord/1087698854775881778?label=Discord&logo=discord)](https://discord.gg/jbaHfsRVBW) [![Twitter Follow](https://img.shields.io/twitter/follow/FlowiseAI?style=social)](https://twitter.com/FlowiseAI) @@ -13,10 +15,25 @@ English | [็น้ซ”ไธญๆ–‡](./i18n/README-TW.md) | [็ฎ€ไฝ“ไธญๆ–‡](./i18n/README-ZH.md) | [ๆ—ฅๆœฌ่ชž](./i18n/README-JA.md) | [ํ•œ๊ตญ์–ด](./i18n/README-KR.md) +
+

Build AI Agents, Visually

+## ๐Ÿ“š Table of Contents + +- [โšก Quick Start](#-quick-start) +- [๐Ÿณ Docker](#-docker) +- [๐Ÿ‘จโ€๐Ÿ’ป Developers](#-developers) +- [๐ŸŒฑ Env Variables](#-env-variables) +- [๐Ÿ“– Documentation](#-documentation) +- [๐ŸŒ Self Host](#-self-host) +- [โ˜๏ธ Flowise Cloud](#๏ธ-flowise-cloud) +- [๐Ÿ™‹ Support](#-support) +- [๐Ÿ™Œ Contributing](#-contributing) +- [๐Ÿ“„ License](#-license) + ## โšกQuick Start Download and Install [NodeJS](https://nodejs.org/en/download) >= 18.15.0 @@ -31,12 +48,6 @@ Download and Install [NodeJS](https://nodejs.org/en/download) >= 18.15.0 npx flowise start ``` - With username & password - - ```bash - npx flowise start --FLOWISE_USERNAME=user --FLOWISE_PASSWORD=1234 - ``` - 3. Open [http://localhost:3000](http://localhost:3000) ## ๐Ÿณ Docker @@ -53,9 +64,11 @@ Download and Install [NodeJS](https://nodejs.org/en/download) >= 18.15.0 ### Docker Image 1. Build the image locally: + ```bash docker build --no-cache -t flowise . ``` + 2. Run image: ```bash @@ -63,6 +76,7 @@ Download and Install [NodeJS](https://nodejs.org/en/download) >= 18.15.0 ``` 3. Stop image: + ```bash docker stop flowise ``` @@ -85,13 +99,13 @@ Flowise has 3 different modules in a single mono repository. ### Setup -1. Clone the repository +1. Clone the repository: ```bash git clone https://github.com/FlowiseAI/Flowise.git ``` -2. Go into repository folder +2. Go into repository folder: ```bash cd Flowise @@ -111,10 +125,24 @@ Flowise has 3 different modules in a single mono repository.
Exit code 134 (JavaScript heap out of memory) - If you get this error when running the above `build` script, try increasing the Node.js heap size and run the script again: + If you get this error when running the above `build` script, try increasing the Node.js heap size and run the script again: - export NODE_OPTIONS="--max-old-space-size=4096" - pnpm build + ```bash + # macOS / Linux / Git Bash + export NODE_OPTIONS="--max-old-space-size=4096" + + # Windows PowerShell + $env:NODE_OPTIONS="--max-old-space-size=4096" + + # Windows CMD + set NODE_OPTIONS=--max-old-space-size=4096 + ``` + + Then run: + + ```bash + pnpm build + ```
@@ -130,7 +158,7 @@ Flowise has 3 different modules in a single mono repository. - Create `.env` file and specify the `VITE_PORT` (refer to `.env.example`) in `packages/ui` - Create `.env` file and specify the `PORT` (refer to `.env.example`) in `packages/server` - - Run + - Run: ```bash pnpm dev @@ -138,22 +166,13 @@ Flowise has 3 different modules in a single mono repository. Any code changes will reload the app automatically on [http://localhost:8080](http://localhost:8080) -## ๐Ÿ”’ Authentication - -To enable app level authentication, add `FLOWISE_USERNAME` and `FLOWISE_PASSWORD` to the `.env` file in `packages/server`: - -``` -FLOWISE_USERNAME=user -FLOWISE_PASSWORD=1234 -``` - ## ๐ŸŒฑ Env Variables -Flowise support different environment variables to configure your instance. You can specify the following variables in the `.env` file inside `packages/server` folder. Read [more](https://github.com/FlowiseAI/Flowise/blob/main/CONTRIBUTING.md#-env-variables) +Flowise supports different environment variables to configure your instance. You can specify the following variables in the `.env` file inside `packages/server` folder. 
Read [more](https://github.com/FlowiseAI/Flowise/blob/main/CONTRIBUTING.md#-env-variables) ## ๐Ÿ“– Documentation -[Flowise Docs](https://docs.flowiseai.com/) +You can view the Flowise Docs [here](https://docs.flowiseai.com/) ## ๐ŸŒ Self Host @@ -171,6 +190,10 @@ Deploy Flowise self-hosted in your existing infrastructure, we support various [ [![Deploy on Railway](https://railway.app/button.svg)](https://railway.app/template/pn4G8S?referralCode=WVNPD9) + - [Northflank](https://northflank.com/stacks/deploy-flowiseai) + + [![Deploy to Northflank](https://assets.northflank.com/deploy_to_northflank_smm_36700fb050.svg)](https://northflank.com/stacks/deploy-flowiseai) + - [Render](https://docs.flowiseai.com/configuration/deployment/render) [![Deploy to Render](https://render.com/images/deploy-to-render-button.svg)](https://docs.flowiseai.com/configuration/deployment/render) @@ -195,11 +218,11 @@ Deploy Flowise self-hosted in your existing infrastructure, we support various [ ## โ˜๏ธ Flowise Cloud -[Get Started with Flowise Cloud](https://flowiseai.com/) +Get Started with [Flowise Cloud](https://flowiseai.com/). ## ๐Ÿ™‹ Support -Feel free to ask any questions, raise problems, and request new features in [discussion](https://github.com/FlowiseAI/Flowise/discussions) +Feel free to ask any questions, raise problems, and request new features in [Discussion](https://github.com/FlowiseAI/Flowise/discussions). ## ๐Ÿ™Œ Contributing @@ -207,9 +230,10 @@ Thanks go to these awesome contributors - +

+ +See [Contributing Guide](CONTRIBUTING.md). Reach out to us at [Discord](https://discord.gg/jbaHfsRVBW) if you have any questions or issues. -See [contributing guide](CONTRIBUTING.md). Reach out to us at [Discord](https://discord.gg/jbaHfsRVBW) if you have any questions or issues. [![Star History Chart](https://api.star-history.com/svg?repos=FlowiseAI/Flowise&type=Timeline)](https://star-history.com/#FlowiseAI/Flowise&Date) ## ๐Ÿ“„ License diff --git a/SECURITY.md b/SECURITY.md index 8d7455de9..6d8a12c2d 100644 --- a/SECURITY.md +++ b/SECURITY.md @@ -1,40 +1,38 @@ -### Responsible Disclosure Policy +### Responsible Disclosure Policy -At Flowise, we prioritize security and continuously work to safeguard our systems. However, vulnerabilities can still exist. If you identify a security issue, please report it to us so we can address it promptly. Your cooperation helps us better protect our platform and users. +At Flowise, we prioritize security and continuously work to safeguard our systems. However, vulnerabilities can still exist. If you identify a security issue, please report it to us so we can address it promptly. Your cooperation helps us better protect our platform and users. 
-### Vulnerabilities +### Out-of-scope vulnerabilities -The following types of issues are some of the most common vulnerabilities: +- Clickjacking on pages without sensitive actions +- CSRF on unauthenticated/logout/login pages +- Attacks requiring MITM (Man-in-the-Middle) or physical device access +- Social engineering attacks +- Activities that cause service disruption (DoS) +- Content spoofing and text injection without a valid attack vector +- Email spoofing +- Absence of DNSSEC, CAA, CSP headers +- Missing Secure or HTTP-only flag on non-sensitive cookies +- Dead links +- User enumeration -- Clickjacking on pages without sensitive actions -- CSRF on unauthenticated/logout/login pages -- Attacks requiring MITM (Man-in-the-Middle) or physical device access -- Social engineering attacks -- Activities that cause service disruption (DoS) -- Content spoofing and text injection without a valid attack vector -- Email spoofing -- Absence of DNSSEC, CAA, CSP headers -- Missing Secure or HTTP-only flag on non-sensitive cookies -- Deadlinks -- User enumeration +### Reporting Guidelines -### Reporting Guidelines +- Submit your findings to https://github.com/FlowiseAI/Flowise/security +- Provide clear details to help us reproduce and fix the issue quickly. -- Submit your findings to https://github.com/FlowiseAI/Flowise/security -- Provide clear details to help us reproduce and fix the issue quickly. +### Disclosure Guidelines -### Disclosure Guidelines +- Do not publicly disclose vulnerabilities until we have assessed, resolved, and notified affected users. +- If you plan to present your research (e.g., at a conference or in a blog), share a draft with us at least **30 days in advance** for review. +- Avoid including: + - Data from any Flowise customer projects + - Flowise user/customer information + - Details about Flowise employees, contractors, or partners -- Do not publicly disclose vulnerabilities until we have assessed, resolved, and notified affected users. 
-- If you plan to present your research (e.g., at a conference or in a blog), share a draft with us at least **30 days in advance** for review. -- Avoid including: - - Data from any Flowise customer projects - - Flowise user/customer information - - Details about Flowise employees, contractors, or partners +### Response to Reports -### Response to Reports +- We will acknowledge your report within **5 business days** and provide an estimated resolution timeline. +- Your report will be kept **confidential**, and your details will not be shared without your consent. -- We will acknowledge your report within **5 business days** and provide an estimated resolution timeline. -- Your report will be kept **confidential**, and your details will not be shared without your consent. - -We appreciate your efforts in helping us maintain a secure platform and look forward to working together to resolve any issues responsibly. +We appreciate your efforts in helping us maintain a secure platform and look forward to working together to resolve any issues responsibly. 
diff --git a/docker/.env.example b/docker/.env.example index 56ac56a80..2240edeb8 100644 --- a/docker/.env.example +++ b/docker/.env.example @@ -1,16 +1,12 @@ PORT=3000 + +# APIKEY_PATH=/your_apikey_path/.flowise # (will be deprecated by end of 2025) + +############################################################################################################ +############################################## DATABASE #################################################### +############################################################################################################ + DATABASE_PATH=/root/.flowise -APIKEY_PATH=/root/.flowise -SECRETKEY_PATH=/root/.flowise -LOG_PATH=/root/.flowise/logs -BLOB_STORAGE_PATH=/root/.flowise/storage - -# APIKEY_STORAGE_TYPE=json (json | db) - -# NUMBER_OF_PROXIES= 1 -# CORS_ORIGINS=* -# IFRAME_ORIGINS=* - # DATABASE_TYPE=postgres # DATABASE_PORT=5432 # DATABASE_HOST="" @@ -18,36 +14,43 @@ BLOB_STORAGE_PATH=/root/.flowise/storage # DATABASE_USER=root # DATABASE_PASSWORD=mypassword # DATABASE_SSL=true +# DATABASE_REJECT_UNAUTHORIZED=true # DATABASE_SSL_KEY_BASE64= + +############################################################################################################ +############################################## SECRET KEYS ################################################# +############################################################################################################ + # SECRETKEY_STORAGE_TYPE=local #(local | aws) -# SECRETKEY_PATH=/your_api_key_path/.flowise -# FLOWISE_SECRETKEY_OVERWRITE=myencryptionkey +SECRETKEY_PATH=/root/.flowise +# FLOWISE_SECRETKEY_OVERWRITE=myencryptionkey # (if you want to overwrite the secret key) # SECRETKEY_AWS_ACCESS_KEY= # SECRETKEY_AWS_SECRET_KEY= # SECRETKEY_AWS_REGION=us-west-2 +# SECRETKEY_AWS_NAME=FlowiseEncryptionKey -# FLOWISE_USERNAME=user -# FLOWISE_PASSWORD=1234 -# FLOWISE_SECRETKEY_OVERWRITE=myencryptionkey -# FLOWISE_FILE_SIZE_LIMIT=50mb + 
+############################################################################################################ +############################################## LOGGING ##################################################### +############################################################################################################ # DEBUG=true -# LOG_LEVEL=info (error | warn | info | verbose | debug) +LOG_PATH=/root/.flowise/logs +# LOG_LEVEL=info #(error | warn | info | verbose | debug) +# LOG_SANITIZE_BODY_FIELDS=password,pwd,pass,secret,token,apikey,api_key,accesstoken,access_token,refreshtoken,refresh_token,clientsecret,client_secret,privatekey,private_key,secretkey,secret_key,auth,authorization,credential,credentials +# LOG_SANITIZE_HEADER_FIELDS=authorization,x-api-key,x-auth-token,cookie # TOOL_FUNCTION_BUILTIN_DEP=crypto,fs # TOOL_FUNCTION_EXTERNAL_DEP=moment,lodash +# ALLOW_BUILTIN_DEP=false -# LANGCHAIN_TRACING_V2=true -# LANGCHAIN_ENDPOINT=https://api.smith.langchain.com -# LANGCHAIN_API_KEY=your_api_key -# LANGCHAIN_PROJECT=your_project -# Uncomment the following line to enable model list config, load the list of models from your local config file -# see https://raw.githubusercontent.com/FlowiseAI/Flowise/main/packages/components/models.json for the format -# MODEL_LIST_CONFIG_JSON=/your_model_list_config_file_path +############################################################################################################ +############################################## STORAGE ##################################################### +############################################################################################################ # STORAGE_TYPE=local (local | s3 | gcs) -# BLOB_STORAGE_PATH=/your_storage_path/.flowise/storage +BLOB_STORAGE_PATH=/root/.flowise/storage # S3_STORAGE_BUCKET_NAME=flowise # S3_STORAGE_ACCESS_KEY_ID= # S3_STORAGE_SECRET_ACCESS_KEY= @@ -59,12 +62,70 @@ BLOB_STORAGE_PATH=/root/.flowise/storage # 
GOOGLE_CLOUD_STORAGE_BUCKET_NAME= # GOOGLE_CLOUD_UNIFORM_BUCKET_ACCESS=true -# SHOW_COMMUNITY_NODES=true -# DISABLED_NODES=bufferMemory,chatOpenAI (comma separated list of node names to disable) -###################### -# METRICS COLLECTION -####################### +############################################################################################################ +############################################## SETTINGS #################################################### +############################################################################################################ + +# NUMBER_OF_PROXIES= 1 +# CORS_ORIGINS=* +# IFRAME_ORIGINS=* +# FLOWISE_FILE_SIZE_LIMIT=50mb +# SHOW_COMMUNITY_NODES=true +# DISABLE_FLOWISE_TELEMETRY=true +# DISABLED_NODES=bufferMemory,chatOpenAI (comma separated list of node names to disable) +# Uncomment the following line to enable model list config, load the list of models from your local config file +# see https://raw.githubusercontent.com/FlowiseAI/Flowise/main/packages/components/models.json for the format +# MODEL_LIST_CONFIG_JSON=/your_model_list_config_file_path + + +############################################################################################################ +############################################ AUTH PARAMETERS ############################################### +############################################################################################################ + +# APP_URL=http://localhost:3000 + +# SMTP_HOST=smtp.host.com +# SMTP_PORT=465 +# SMTP_USER=smtp_user +# SMTP_PASSWORD=smtp_password +# SMTP_SECURE=true +# ALLOW_UNAUTHORIZED_CERTS=false +# SENDER_EMAIL=team@example.com + +JWT_AUTH_TOKEN_SECRET='AABBCCDDAABBCCDDAABBCCDDAABBCCDDAABBCCDD' +JWT_REFRESH_TOKEN_SECRET='AABBCCDDAABBCCDDAABBCCDDAABBCCDDAABBCCDD' +JWT_ISSUER='ISSUER' +JWT_AUDIENCE='AUDIENCE' +JWT_TOKEN_EXPIRY_IN_MINUTES=360 +JWT_REFRESH_TOKEN_EXPIRY_IN_MINUTES=43200 +# EXPIRE_AUTH_TOKENS_ON_RESTART=true # (if you need to 
expire all tokens on app restart) +# EXPRESS_SESSION_SECRET=flowise +# SECURE_COOKIES= + +# INVITE_TOKEN_EXPIRY_IN_HOURS=24 +# PASSWORD_RESET_TOKEN_EXPIRY_IN_MINS=15 +# PASSWORD_SALT_HASH_ROUNDS=10 +# TOKEN_HASH_SECRET='popcorn' + +# WORKSPACE_INVITE_TEMPLATE_PATH=/path/to/custom/workspace_invite.hbs + + +############################################################################################################ +############################################# ENTERPRISE ################################################### +############################################################################################################ + +# LICENSE_URL= +# FLOWISE_EE_LICENSE_KEY= +# OFFLINE= + + +############################################################################################################ +########################################### METRICS COLLECTION ############################################# +############################################################################################################ + +# POSTHOG_PUBLIC_API_KEY=your_posthog_public_api_key + # ENABLE_METRICS=false # METRICS_PROVIDER=prometheus # prometheus | open_telemetry # METRICS_INCLUDE_NODE_METRICS=true # default is true @@ -75,15 +136,21 @@ BLOB_STORAGE_PATH=/root/.flowise/storage # METRICS_OPEN_TELEMETRY_PROTOCOL=http # http | grpc | proto (default is http) # METRICS_OPEN_TELEMETRY_DEBUG=true # default is false -# Uncomment the following lines to enable global agent proxy -# see https://www.npmjs.com/package/global-agent for more details + +############################################################################################################ +############################################### PROXY ###################################################### +############################################################################################################ + +# Uncomment the following lines to enable global agent proxy, see https://www.npmjs.com/package/global-agent for more 
details # GLOBAL_AGENT_HTTP_PROXY=CorporateHttpProxyUrl # GLOBAL_AGENT_HTTPS_PROXY=CorporateHttpsProxyUrl # GLOBAL_AGENT_NO_PROXY=ExceptionHostsToBypassProxyIfNeeded -###################### -# QUEUE CONFIGURATION -####################### + +############################################################################################################ +########################################### QUEUE CONFIGURATION ############################################ +############################################################################################################ + # MODE=queue #(queue | main) # QUEUE_NAME=flowise-queue # QUEUE_REDIS_EVENT_STREAM_MAX_LEN=100000 @@ -100,4 +167,14 @@ BLOB_STORAGE_PATH=/root/.flowise/storage # REDIS_KEY= # REDIS_CA= # REDIS_KEEP_ALIVE= -# ENABLE_BULLMQ_DASHBOARD= \ No newline at end of file +# ENABLE_BULLMQ_DASHBOARD= + + +############################################################################################################ +############################################## SECURITY #################################################### +############################################################################################################ + +# HTTP_DENY_LIST= +# CUSTOM_MCP_SECURITY_CHECK=true +# CUSTOM_MCP_PROTOCOL=sse #(stdio | sse) +# TRUST_PROXY=true #(true | false | 1 | loopback| linklocal | uniquelocal | IP addresses | loopback, IP addresses) diff --git a/docker/README.md b/docker/README.md index 35d03142d..bcadd93d5 100644 --- a/docker/README.md +++ b/docker/README.md @@ -9,28 +9,43 @@ Starts Flowise from [DockerHub Image](https://hub.docker.com/r/flowiseai/flowise 3. Open [http://localhost:3000](http://localhost:3000) 4. You can bring the containers down by `docker compose stop` -## ๐Ÿ”’ Authentication - -1. Create `.env` file and specify the `PORT`, `FLOWISE_USERNAME`, and `FLOWISE_PASSWORD` (refer to `.env.example`) -2. 
Pass `FLOWISE_USERNAME` and `FLOWISE_PASSWORD` to the `docker-compose.yml` file: - ``` - environment: - - PORT=${PORT} - - FLOWISE_USERNAME=${FLOWISE_USERNAME} - - FLOWISE_PASSWORD=${FLOWISE_PASSWORD} - ``` -3. `docker compose up -d` -4. Open [http://localhost:3000](http://localhost:3000) -5. You can bring the containers down by `docker compose stop` - ## ๐ŸŒฑ Env Variables -If you like to persist your data (flows, logs, apikeys, credentials), set these variables in the `.env` file inside `docker` folder: +If you like to persist your data (flows, logs, credentials, storage), set these variables in the `.env` file inside `docker` folder: - DATABASE_PATH=/root/.flowise -- APIKEY_PATH=/root/.flowise - LOG_PATH=/root/.flowise/logs - SECRETKEY_PATH=/root/.flowise - BLOB_STORAGE_PATH=/root/.flowise/storage -Flowise also support different environment variables to configure your instance. Read [more](https://docs.flowiseai.com/environment-variables) +Flowise also supports different environment variables to configure your instance. 
Read [more](https://docs.flowiseai.com/configuration/environment-variables) + +## Queue Mode: + +### Building from source: + +You can build the images for worker and main from scratch with: + +``` +docker compose -f docker-compose-queue-source.yml up -d +``` + +Monitor Health: + +``` +docker compose -f docker-compose-queue-source.yml ps +``` + +### From pre-built images: + +You can also use the pre-built images: + +``` +docker compose -f docker-compose-queue-prebuilt.yml up -d +``` + +Monitor Health: + +``` +docker compose -f docker-compose-queue-prebuilt.yml ps +``` diff --git a/docker/docker-compose-queue-prebuilt.yml b/docker/docker-compose-queue-prebuilt.yml new file mode 100644 index 000000000..6d6941590 --- /dev/null +++ b/docker/docker-compose-queue-prebuilt.yml @@ -0,0 +1,316 @@ +version: '3.1' + +services: + redis: + image: redis:alpine + container_name: flowise-redis + ports: + - '6379:6379' + volumes: + - redis_data:/data + networks: + - flowise-net + restart: always + + flowise: + image: flowiseai/flowise:latest + container_name: flowise-main + restart: always + ports: + - '${PORT:-3000}:${PORT:-3000}' + volumes: + - ~/.flowise:/root/.flowise + environment: + # --- Essential Flowise Vars --- + - PORT=${PORT:-3000} + - DATABASE_PATH=${DATABASE_PATH:-/root/.flowise} + - DATABASE_TYPE=${DATABASE_TYPE} + - DATABASE_PORT=${DATABASE_PORT} + - DATABASE_HOST=${DATABASE_HOST} + - DATABASE_NAME=${DATABASE_NAME} + - DATABASE_USER=${DATABASE_USER} + - DATABASE_PASSWORD=${DATABASE_PASSWORD} + - DATABASE_SSL=${DATABASE_SSL} + - DATABASE_SSL_KEY_BASE64=${DATABASE_SSL_KEY_BASE64} + + # SECRET KEYS + - SECRETKEY_STORAGE_TYPE=${SECRETKEY_STORAGE_TYPE} + - SECRETKEY_PATH=${SECRETKEY_PATH} + - FLOWISE_SECRETKEY_OVERWRITE=${FLOWISE_SECRETKEY_OVERWRITE} + - SECRETKEY_AWS_ACCESS_KEY=${SECRETKEY_AWS_ACCESS_KEY} + - SECRETKEY_AWS_SECRET_KEY=${SECRETKEY_AWS_SECRET_KEY} + - SECRETKEY_AWS_REGION=${SECRETKEY_AWS_REGION} + - SECRETKEY_AWS_NAME=${SECRETKEY_AWS_NAME} + + # LOGGING + - 
DEBUG=${DEBUG} + - LOG_PATH=${LOG_PATH} + - LOG_LEVEL=${LOG_LEVEL} + - LOG_SANITIZE_BODY_FIELDS=${LOG_SANITIZE_BODY_FIELDS} + - LOG_SANITIZE_HEADER_FIELDS=${LOG_SANITIZE_HEADER_FIELDS} + + # CUSTOM TOOL/FUNCTION DEPENDENCIES + - TOOL_FUNCTION_BUILTIN_DEP=${TOOL_FUNCTION_BUILTIN_DEP} + - TOOL_FUNCTION_EXTERNAL_DEP=${TOOL_FUNCTION_EXTERNAL_DEP} + - ALLOW_BUILTIN_DEP=${ALLOW_BUILTIN_DEP} + + # STORAGE + - STORAGE_TYPE=${STORAGE_TYPE} + - BLOB_STORAGE_PATH=${BLOB_STORAGE_PATH} + - S3_STORAGE_BUCKET_NAME=${S3_STORAGE_BUCKET_NAME} + - S3_STORAGE_ACCESS_KEY_ID=${S3_STORAGE_ACCESS_KEY_ID} + - S3_STORAGE_SECRET_ACCESS_KEY=${S3_STORAGE_SECRET_ACCESS_KEY} + - S3_STORAGE_REGION=${S3_STORAGE_REGION} + - S3_ENDPOINT_URL=${S3_ENDPOINT_URL} + - S3_FORCE_PATH_STYLE=${S3_FORCE_PATH_STYLE} + - GOOGLE_CLOUD_STORAGE_CREDENTIAL=${GOOGLE_CLOUD_STORAGE_CREDENTIAL} + - GOOGLE_CLOUD_STORAGE_PROJ_ID=${GOOGLE_CLOUD_STORAGE_PROJ_ID} + - GOOGLE_CLOUD_STORAGE_BUCKET_NAME=${GOOGLE_CLOUD_STORAGE_BUCKET_NAME} + - GOOGLE_CLOUD_UNIFORM_BUCKET_ACCESS=${GOOGLE_CLOUD_UNIFORM_BUCKET_ACCESS} + + # SETTINGS + - NUMBER_OF_PROXIES=${NUMBER_OF_PROXIES} + - CORS_ORIGINS=${CORS_ORIGINS} + - IFRAME_ORIGINS=${IFRAME_ORIGINS} + - FLOWISE_FILE_SIZE_LIMIT=${FLOWISE_FILE_SIZE_LIMIT} + - SHOW_COMMUNITY_NODES=${SHOW_COMMUNITY_NODES} + - DISABLE_FLOWISE_TELEMETRY=${DISABLE_FLOWISE_TELEMETRY} + - DISABLED_NODES=${DISABLED_NODES} + - MODEL_LIST_CONFIG_JSON=${MODEL_LIST_CONFIG_JSON} + + # AUTH PARAMETERS + - APP_URL=${APP_URL} + - JWT_AUTH_TOKEN_SECRET=${JWT_AUTH_TOKEN_SECRET} + - JWT_REFRESH_TOKEN_SECRET=${JWT_REFRESH_TOKEN_SECRET} + - JWT_ISSUER=${JWT_ISSUER} + - JWT_AUDIENCE=${JWT_AUDIENCE} + - JWT_TOKEN_EXPIRY_IN_MINUTES=${JWT_TOKEN_EXPIRY_IN_MINUTES} + - JWT_REFRESH_TOKEN_EXPIRY_IN_MINUTES=${JWT_REFRESH_TOKEN_EXPIRY_IN_MINUTES} + - EXPIRE_AUTH_TOKENS_ON_RESTART=${EXPIRE_AUTH_TOKENS_ON_RESTART} + - EXPRESS_SESSION_SECRET=${EXPRESS_SESSION_SECRET} + - 
PASSWORD_RESET_TOKEN_EXPIRY_IN_MINS=${PASSWORD_RESET_TOKEN_EXPIRY_IN_MINS} + - PASSWORD_SALT_HASH_ROUNDS=${PASSWORD_SALT_HASH_ROUNDS} + - TOKEN_HASH_SECRET=${TOKEN_HASH_SECRET} + - SECURE_COOKIES=${SECURE_COOKIES} + + # EMAIL + - SMTP_HOST=${SMTP_HOST} + - SMTP_PORT=${SMTP_PORT} + - SMTP_USER=${SMTP_USER} + - SMTP_PASSWORD=${SMTP_PASSWORD} + - SMTP_SECURE=${SMTP_SECURE} + - ALLOW_UNAUTHORIZED_CERTS=${ALLOW_UNAUTHORIZED_CERTS} + - SENDER_EMAIL=${SENDER_EMAIL} + + # ENTERPRISE + - LICENSE_URL=${LICENSE_URL} + - FLOWISE_EE_LICENSE_KEY=${FLOWISE_EE_LICENSE_KEY} + - OFFLINE=${OFFLINE} + - INVITE_TOKEN_EXPIRY_IN_HOURS=${INVITE_TOKEN_EXPIRY_IN_HOURS} + - WORKSPACE_INVITE_TEMPLATE_PATH=${WORKSPACE_INVITE_TEMPLATE_PATH} + + # METRICS COLLECTION + - POSTHOG_PUBLIC_API_KEY=${POSTHOG_PUBLIC_API_KEY} + - ENABLE_METRICS=${ENABLE_METRICS} + - METRICS_PROVIDER=${METRICS_PROVIDER} + - METRICS_INCLUDE_NODE_METRICS=${METRICS_INCLUDE_NODE_METRICS} + - METRICS_SERVICE_NAME=${METRICS_SERVICE_NAME} + - METRICS_OPEN_TELEMETRY_METRIC_ENDPOINT=${METRICS_OPEN_TELEMETRY_METRIC_ENDPOINT} + - METRICS_OPEN_TELEMETRY_PROTOCOL=${METRICS_OPEN_TELEMETRY_PROTOCOL} + - METRICS_OPEN_TELEMETRY_DEBUG=${METRICS_OPEN_TELEMETRY_DEBUG} + + # PROXY + - GLOBAL_AGENT_HTTP_PROXY=${GLOBAL_AGENT_HTTP_PROXY} + - GLOBAL_AGENT_HTTPS_PROXY=${GLOBAL_AGENT_HTTPS_PROXY} + - GLOBAL_AGENT_NO_PROXY=${GLOBAL_AGENT_NO_PROXY} + + # --- Queue Configuration (Main Instance) --- + - MODE=${MODE:-queue} + - QUEUE_NAME=${QUEUE_NAME:-flowise-queue} + - QUEUE_REDIS_EVENT_STREAM_MAX_LEN=${QUEUE_REDIS_EVENT_STREAM_MAX_LEN} + - WORKER_CONCURRENCY=${WORKER_CONCURRENCY} + - REMOVE_ON_AGE=${REMOVE_ON_AGE} + - REMOVE_ON_COUNT=${REMOVE_ON_COUNT} + - REDIS_URL=${REDIS_URL:-redis://redis:6379} + - REDIS_HOST=${REDIS_HOST} + - REDIS_PORT=${REDIS_PORT} + - REDIS_USERNAME=${REDIS_USERNAME} + - REDIS_PASSWORD=${REDIS_PASSWORD} + - REDIS_TLS=${REDIS_TLS} + - REDIS_CERT=${REDIS_CERT} + - REDIS_KEY=${REDIS_KEY} + - REDIS_CA=${REDIS_CA} + - 
REDIS_KEEP_ALIVE=${REDIS_KEEP_ALIVE} + - ENABLE_BULLMQ_DASHBOARD=${ENABLE_BULLMQ_DASHBOARD} + + # SECURITY + - CUSTOM_MCP_SECURITY_CHECK=${CUSTOM_MCP_SECURITY_CHECK} + - CUSTOM_MCP_PROTOCOL=${CUSTOM_MCP_PROTOCOL} + - HTTP_DENY_LIST=${HTTP_DENY_LIST} + - TRUST_PROXY=${TRUST_PROXY} + healthcheck: + test: ['CMD', 'curl', '-f', 'http://localhost:${PORT:-3000}/api/v1/ping'] + interval: 10s + timeout: 5s + retries: 5 + start_period: 30s + entrypoint: /bin/sh -c "sleep 3; flowise start" + depends_on: + - redis + networks: + - flowise-net + + flowise-worker: + image: flowiseai/flowise-worker:latest + container_name: flowise-worker + restart: always + volumes: + - ~/.flowise:/root/.flowise + environment: + # --- Essential Flowise Vars --- + - WORKER_PORT=${WORKER_PORT:-5566} + - DATABASE_PATH=${DATABASE_PATH:-/root/.flowise} + - DATABASE_TYPE=${DATABASE_TYPE} + - DATABASE_PORT=${DATABASE_PORT} + - DATABASE_HOST=${DATABASE_HOST} + - DATABASE_NAME=${DATABASE_NAME} + - DATABASE_USER=${DATABASE_USER} + - DATABASE_PASSWORD=${DATABASE_PASSWORD} + - DATABASE_SSL=${DATABASE_SSL} + - DATABASE_SSL_KEY_BASE64=${DATABASE_SSL_KEY_BASE64} + + # SECRET KEYS + - SECRETKEY_STORAGE_TYPE=${SECRETKEY_STORAGE_TYPE} + - SECRETKEY_PATH=${SECRETKEY_PATH} + - FLOWISE_SECRETKEY_OVERWRITE=${FLOWISE_SECRETKEY_OVERWRITE} + - SECRETKEY_AWS_ACCESS_KEY=${SECRETKEY_AWS_ACCESS_KEY} + - SECRETKEY_AWS_SECRET_KEY=${SECRETKEY_AWS_SECRET_KEY} + - SECRETKEY_AWS_REGION=${SECRETKEY_AWS_REGION} + - SECRETKEY_AWS_NAME=${SECRETKEY_AWS_NAME} + + # LOGGING + - DEBUG=${DEBUG} + - LOG_PATH=${LOG_PATH} + - LOG_LEVEL=${LOG_LEVEL} + - LOG_SANITIZE_BODY_FIELDS=${LOG_SANITIZE_BODY_FIELDS} + - LOG_SANITIZE_HEADER_FIELDS=${LOG_SANITIZE_HEADER_FIELDS} + + # CUSTOM TOOL/FUNCTION DEPENDENCIES + - TOOL_FUNCTION_BUILTIN_DEP=${TOOL_FUNCTION_BUILTIN_DEP} + - TOOL_FUNCTION_EXTERNAL_DEP=${TOOL_FUNCTION_EXTERNAL_DEP} + - ALLOW_BUILTIN_DEP=${ALLOW_BUILTIN_DEP} + + # STORAGE + - STORAGE_TYPE=${STORAGE_TYPE} + - 
BLOB_STORAGE_PATH=${BLOB_STORAGE_PATH} + - S3_STORAGE_BUCKET_NAME=${S3_STORAGE_BUCKET_NAME} + - S3_STORAGE_ACCESS_KEY_ID=${S3_STORAGE_ACCESS_KEY_ID} + - S3_STORAGE_SECRET_ACCESS_KEY=${S3_STORAGE_SECRET_ACCESS_KEY} + - S3_STORAGE_REGION=${S3_STORAGE_REGION} + - S3_ENDPOINT_URL=${S3_ENDPOINT_URL} + - S3_FORCE_PATH_STYLE=${S3_FORCE_PATH_STYLE} + - GOOGLE_CLOUD_STORAGE_CREDENTIAL=${GOOGLE_CLOUD_STORAGE_CREDENTIAL} + - GOOGLE_CLOUD_STORAGE_PROJ_ID=${GOOGLE_CLOUD_STORAGE_PROJ_ID} + - GOOGLE_CLOUD_STORAGE_BUCKET_NAME=${GOOGLE_CLOUD_STORAGE_BUCKET_NAME} + - GOOGLE_CLOUD_UNIFORM_BUCKET_ACCESS=${GOOGLE_CLOUD_UNIFORM_BUCKET_ACCESS} + + # SETTINGS + - NUMBER_OF_PROXIES=${NUMBER_OF_PROXIES} + - CORS_ORIGINS=${CORS_ORIGINS} + - IFRAME_ORIGINS=${IFRAME_ORIGINS} + - FLOWISE_FILE_SIZE_LIMIT=${FLOWISE_FILE_SIZE_LIMIT} + - SHOW_COMMUNITY_NODES=${SHOW_COMMUNITY_NODES} + - DISABLE_FLOWISE_TELEMETRY=${DISABLE_FLOWISE_TELEMETRY} + - DISABLED_NODES=${DISABLED_NODES} + - MODEL_LIST_CONFIG_JSON=${MODEL_LIST_CONFIG_JSON} + + # AUTH PARAMETERS + - APP_URL=${APP_URL} + - JWT_AUTH_TOKEN_SECRET=${JWT_AUTH_TOKEN_SECRET} + - JWT_REFRESH_TOKEN_SECRET=${JWT_REFRESH_TOKEN_SECRET} + - JWT_ISSUER=${JWT_ISSUER} + - JWT_AUDIENCE=${JWT_AUDIENCE} + - JWT_TOKEN_EXPIRY_IN_MINUTES=${JWT_TOKEN_EXPIRY_IN_MINUTES} + - JWT_REFRESH_TOKEN_EXPIRY_IN_MINUTES=${JWT_REFRESH_TOKEN_EXPIRY_IN_MINUTES} + - EXPIRE_AUTH_TOKENS_ON_RESTART=${EXPIRE_AUTH_TOKENS_ON_RESTART} + - EXPRESS_SESSION_SECRET=${EXPRESS_SESSION_SECRET} + - PASSWORD_RESET_TOKEN_EXPIRY_IN_MINS=${PASSWORD_RESET_TOKEN_EXPIRY_IN_MINS} + - PASSWORD_SALT_HASH_ROUNDS=${PASSWORD_SALT_HASH_ROUNDS} + - TOKEN_HASH_SECRET=${TOKEN_HASH_SECRET} + - SECURE_COOKIES=${SECURE_COOKIES} + + # EMAIL + - SMTP_HOST=${SMTP_HOST} + - SMTP_PORT=${SMTP_PORT} + - SMTP_USER=${SMTP_USER} + - SMTP_PASSWORD=${SMTP_PASSWORD} + - SMTP_SECURE=${SMTP_SECURE} + - ALLOW_UNAUTHORIZED_CERTS=${ALLOW_UNAUTHORIZED_CERTS} + - SENDER_EMAIL=${SENDER_EMAIL} + + # ENTERPRISE + - 
LICENSE_URL=${LICENSE_URL} + - FLOWISE_EE_LICENSE_KEY=${FLOWISE_EE_LICENSE_KEY} + - OFFLINE=${OFFLINE} + - INVITE_TOKEN_EXPIRY_IN_HOURS=${INVITE_TOKEN_EXPIRY_IN_HOURS} + - WORKSPACE_INVITE_TEMPLATE_PATH=${WORKSPACE_INVITE_TEMPLATE_PATH} + + # METRICS COLLECTION + - POSTHOG_PUBLIC_API_KEY=${POSTHOG_PUBLIC_API_KEY} + - ENABLE_METRICS=${ENABLE_METRICS} + - METRICS_PROVIDER=${METRICS_PROVIDER} + - METRICS_INCLUDE_NODE_METRICS=${METRICS_INCLUDE_NODE_METRICS} + - METRICS_SERVICE_NAME=${METRICS_SERVICE_NAME} + - METRICS_OPEN_TELEMETRY_METRIC_ENDPOINT=${METRICS_OPEN_TELEMETRY_METRIC_ENDPOINT} + - METRICS_OPEN_TELEMETRY_PROTOCOL=${METRICS_OPEN_TELEMETRY_PROTOCOL} + - METRICS_OPEN_TELEMETRY_DEBUG=${METRICS_OPEN_TELEMETRY_DEBUG} + + # PROXY + - GLOBAL_AGENT_HTTP_PROXY=${GLOBAL_AGENT_HTTP_PROXY} + - GLOBAL_AGENT_HTTPS_PROXY=${GLOBAL_AGENT_HTTPS_PROXY} + - GLOBAL_AGENT_NO_PROXY=${GLOBAL_AGENT_NO_PROXY} + + # --- Queue Configuration (Worker Instance) --- + - MODE=${MODE:-queue} + - QUEUE_NAME=${QUEUE_NAME:-flowise-queue} + - QUEUE_REDIS_EVENT_STREAM_MAX_LEN=${QUEUE_REDIS_EVENT_STREAM_MAX_LEN} + - WORKER_CONCURRENCY=${WORKER_CONCURRENCY} + - REMOVE_ON_AGE=${REMOVE_ON_AGE} + - REMOVE_ON_COUNT=${REMOVE_ON_COUNT} + - REDIS_URL=${REDIS_URL:-redis://redis:6379} + - REDIS_HOST=${REDIS_HOST} + - REDIS_PORT=${REDIS_PORT} + - REDIS_USERNAME=${REDIS_USERNAME} + - REDIS_PASSWORD=${REDIS_PASSWORD} + - REDIS_TLS=${REDIS_TLS} + - REDIS_CERT=${REDIS_CERT} + - REDIS_KEY=${REDIS_KEY} + - REDIS_CA=${REDIS_CA} + - REDIS_KEEP_ALIVE=${REDIS_KEEP_ALIVE} + - ENABLE_BULLMQ_DASHBOARD=${ENABLE_BULLMQ_DASHBOARD} + + # SECURITY + - CUSTOM_MCP_SECURITY_CHECK=${CUSTOM_MCP_SECURITY_CHECK} + - CUSTOM_MCP_PROTOCOL=${CUSTOM_MCP_PROTOCOL} + - HTTP_DENY_LIST=${HTTP_DENY_LIST} + - TRUST_PROXY=${TRUST_PROXY} + healthcheck: + test: ['CMD', 'curl', '-f', 'http://localhost:${WORKER_PORT:-5566}/healthz'] + interval: 10s + timeout: 5s + retries: 5 + start_period: 30s + entrypoint: /bin/sh -c "node 
/app/healthcheck/healthcheck.js & sleep 5 && pnpm run start-worker" + depends_on: + - redis + - flowise + networks: + - flowise-net + +volumes: + redis_data: + driver: local + +networks: + flowise-net: + driver: bridge diff --git a/docker/docker-compose-queue-source.yml b/docker/docker-compose-queue-source.yml new file mode 100644 index 000000000..a95608e5c --- /dev/null +++ b/docker/docker-compose-queue-source.yml @@ -0,0 +1,71 @@ +version: '3.1' + +services: + redis: + image: redis:alpine + container_name: flowise-redis + ports: + - '6379:6379' + volumes: + - redis_data:/data + networks: + - flowise-net + + flowise: + container_name: flowise-main + build: + context: .. # Build using the Dockerfile in the root directory + dockerfile: docker/Dockerfile + ports: + - '${PORT}:${PORT}' + volumes: + # Mount local .flowise to container's default location + - ../.flowise:/root/.flowise + environment: + # --- Essential Flowise Vars --- + - PORT=${PORT:-3000} + - DATABASE_PATH=/root/.flowise + - SECRETKEY_PATH=/root/.flowise + - LOG_PATH=/root/.flowise/logs + - BLOB_STORAGE_PATH=/root/.flowise/storage + # --- Queue Vars (Main Instance) --- + - MODE=queue + - QUEUE_NAME=flowise-queue # Ensure this matches worker + - REDIS_URL=redis://redis:6379 # Use service name 'redis' + depends_on: + - redis + networks: + - flowise-net + + flowise-worker: + container_name: flowise-worker + build: + context: .. 
# Build context is still the root + dockerfile: docker/worker/Dockerfile # Ensure this path is correct + volumes: + # Mount same local .flowise to worker + - ../.flowise:/root/.flowise + environment: + # --- Essential Flowise Vars --- + - WORKER_PORT=${WORKER_PORT:-5566} # Port for worker healthcheck + - DATABASE_PATH=/root/.flowise + - SECRETKEY_PATH=/root/.flowise + - LOG_PATH=/root/.flowise/logs + - BLOB_STORAGE_PATH=/root/.flowise/storage + # --- Queue Vars (Worker Instance) --- + - MODE=queue + - QUEUE_NAME=flowise-queue # Ensure this matches main + - REDIS_URL=redis://redis:6379 # Use service name 'redis' + depends_on: + - redis + - flowise + networks: + - flowise-net + +volumes: + redis_data: + driver: local + +networks: + flowise-net: + driver: bridge diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml index 3e5584863..e43283b15 100644 --- a/docker/docker-compose.yml +++ b/docker/docker-compose.yml @@ -2,16 +2,12 @@ version: '3.1' services: flowise: - image: flowiseai/flowise + image: flowiseai/flowise:latest restart: always environment: - PORT=${PORT} - - CORS_ORIGINS=${CORS_ORIGINS} - - IFRAME_ORIGINS=${IFRAME_ORIGINS} - - FLOWISE_USERNAME=${FLOWISE_USERNAME} - - FLOWISE_PASSWORD=${FLOWISE_PASSWORD} - - FLOWISE_FILE_SIZE_LIMIT=${FLOWISE_FILE_SIZE_LIMIT} - - DEBUG=${DEBUG} + + # DATABASE - DATABASE_PATH=${DATABASE_PATH} - DATABASE_TYPE=${DATABASE_TYPE} - DATABASE_PORT=${DATABASE_PORT} @@ -21,35 +17,122 @@ services: - DATABASE_PASSWORD=${DATABASE_PASSWORD} - DATABASE_SSL=${DATABASE_SSL} - DATABASE_SSL_KEY_BASE64=${DATABASE_SSL_KEY_BASE64} - - APIKEY_STORAGE_TYPE=${APIKEY_STORAGE_TYPE} - - APIKEY_PATH=${APIKEY_PATH} + + # SECRET KEYS + - SECRETKEY_STORAGE_TYPE=${SECRETKEY_STORAGE_TYPE} - SECRETKEY_PATH=${SECRETKEY_PATH} - FLOWISE_SECRETKEY_OVERWRITE=${FLOWISE_SECRETKEY_OVERWRITE} - - LOG_LEVEL=${LOG_LEVEL} + - SECRETKEY_AWS_ACCESS_KEY=${SECRETKEY_AWS_ACCESS_KEY} + - SECRETKEY_AWS_SECRET_KEY=${SECRETKEY_AWS_SECRET_KEY} + - 
SECRETKEY_AWS_REGION=${SECRETKEY_AWS_REGION} + - SECRETKEY_AWS_NAME=${SECRETKEY_AWS_NAME} + + # LOGGING + - DEBUG=${DEBUG} - LOG_PATH=${LOG_PATH} + - LOG_LEVEL=${LOG_LEVEL} + - LOG_SANITIZE_BODY_FIELDS=${LOG_SANITIZE_BODY_FIELDS} + - LOG_SANITIZE_HEADER_FIELDS=${LOG_SANITIZE_HEADER_FIELDS} + + # CUSTOM TOOL/FUNCTION DEPENDENCIES + - TOOL_FUNCTION_BUILTIN_DEP=${TOOL_FUNCTION_BUILTIN_DEP} + - TOOL_FUNCTION_EXTERNAL_DEP=${TOOL_FUNCTION_EXTERNAL_DEP} + - ALLOW_BUILTIN_DEP=${ALLOW_BUILTIN_DEP} + + # STORAGE + - STORAGE_TYPE=${STORAGE_TYPE} - BLOB_STORAGE_PATH=${BLOB_STORAGE_PATH} + - S3_STORAGE_BUCKET_NAME=${S3_STORAGE_BUCKET_NAME} + - S3_STORAGE_ACCESS_KEY_ID=${S3_STORAGE_ACCESS_KEY_ID} + - S3_STORAGE_SECRET_ACCESS_KEY=${S3_STORAGE_SECRET_ACCESS_KEY} + - S3_STORAGE_REGION=${S3_STORAGE_REGION} + - S3_ENDPOINT_URL=${S3_ENDPOINT_URL} + - S3_FORCE_PATH_STYLE=${S3_FORCE_PATH_STYLE} + - GOOGLE_CLOUD_STORAGE_CREDENTIAL=${GOOGLE_CLOUD_STORAGE_CREDENTIAL} + - GOOGLE_CLOUD_STORAGE_PROJ_ID=${GOOGLE_CLOUD_STORAGE_PROJ_ID} + - GOOGLE_CLOUD_STORAGE_BUCKET_NAME=${GOOGLE_CLOUD_STORAGE_BUCKET_NAME} + - GOOGLE_CLOUD_UNIFORM_BUCKET_ACCESS=${GOOGLE_CLOUD_UNIFORM_BUCKET_ACCESS} + + # SETTINGS + - NUMBER_OF_PROXIES=${NUMBER_OF_PROXIES} + - CORS_ORIGINS=${CORS_ORIGINS} + - IFRAME_ORIGINS=${IFRAME_ORIGINS} + - FLOWISE_FILE_SIZE_LIMIT=${FLOWISE_FILE_SIZE_LIMIT} + - SHOW_COMMUNITY_NODES=${SHOW_COMMUNITY_NODES} + - DISABLE_FLOWISE_TELEMETRY=${DISABLE_FLOWISE_TELEMETRY} + - DISABLED_NODES=${DISABLED_NODES} - MODEL_LIST_CONFIG_JSON=${MODEL_LIST_CONFIG_JSON} + + # AUTH PARAMETERS + - APP_URL=${APP_URL} + - JWT_AUTH_TOKEN_SECRET=${JWT_AUTH_TOKEN_SECRET} + - JWT_REFRESH_TOKEN_SECRET=${JWT_REFRESH_TOKEN_SECRET} + - JWT_ISSUER=${JWT_ISSUER} + - JWT_AUDIENCE=${JWT_AUDIENCE} + - JWT_TOKEN_EXPIRY_IN_MINUTES=${JWT_TOKEN_EXPIRY_IN_MINUTES} + - JWT_REFRESH_TOKEN_EXPIRY_IN_MINUTES=${JWT_REFRESH_TOKEN_EXPIRY_IN_MINUTES} + - EXPIRE_AUTH_TOKENS_ON_RESTART=${EXPIRE_AUTH_TOKENS_ON_RESTART} + - 
EXPRESS_SESSION_SECRET=${EXPRESS_SESSION_SECRET} + - PASSWORD_RESET_TOKEN_EXPIRY_IN_MINS=${PASSWORD_RESET_TOKEN_EXPIRY_IN_MINS} + - PASSWORD_SALT_HASH_ROUNDS=${PASSWORD_SALT_HASH_ROUNDS} + - TOKEN_HASH_SECRET=${TOKEN_HASH_SECRET} + - SECURE_COOKIES=${SECURE_COOKIES} + + # EMAIL + - SMTP_HOST=${SMTP_HOST} + - SMTP_PORT=${SMTP_PORT} + - SMTP_USER=${SMTP_USER} + - SMTP_PASSWORD=${SMTP_PASSWORD} + - SMTP_SECURE=${SMTP_SECURE} + - ALLOW_UNAUTHORIZED_CERTS=${ALLOW_UNAUTHORIZED_CERTS} + - SENDER_EMAIL=${SENDER_EMAIL} + + # ENTERPRISE + - LICENSE_URL=${LICENSE_URL} + - FLOWISE_EE_LICENSE_KEY=${FLOWISE_EE_LICENSE_KEY} + - OFFLINE=${OFFLINE} + - INVITE_TOKEN_EXPIRY_IN_HOURS=${INVITE_TOKEN_EXPIRY_IN_HOURS} + - WORKSPACE_INVITE_TEMPLATE_PATH=${WORKSPACE_INVITE_TEMPLATE_PATH} + + # METRICS COLLECTION + - POSTHOG_PUBLIC_API_KEY=${POSTHOG_PUBLIC_API_KEY} + - ENABLE_METRICS=${ENABLE_METRICS} + - METRICS_PROVIDER=${METRICS_PROVIDER} + - METRICS_INCLUDE_NODE_METRICS=${METRICS_INCLUDE_NODE_METRICS} + - METRICS_SERVICE_NAME=${METRICS_SERVICE_NAME} + - METRICS_OPEN_TELEMETRY_METRIC_ENDPOINT=${METRICS_OPEN_TELEMETRY_METRIC_ENDPOINT} + - METRICS_OPEN_TELEMETRY_PROTOCOL=${METRICS_OPEN_TELEMETRY_PROTOCOL} + - METRICS_OPEN_TELEMETRY_DEBUG=${METRICS_OPEN_TELEMETRY_DEBUG} + + # PROXY - GLOBAL_AGENT_HTTP_PROXY=${GLOBAL_AGENT_HTTP_PROXY} - GLOBAL_AGENT_HTTPS_PROXY=${GLOBAL_AGENT_HTTPS_PROXY} - GLOBAL_AGENT_NO_PROXY=${GLOBAL_AGENT_NO_PROXY} - - DISABLED_NODES=${DISABLED_NODES} + + # QUEUE CONFIGURATION - MODE=${MODE} - - WORKER_CONCURRENCY=${WORKER_CONCURRENCY} - QUEUE_NAME=${QUEUE_NAME} - QUEUE_REDIS_EVENT_STREAM_MAX_LEN=${QUEUE_REDIS_EVENT_STREAM_MAX_LEN} + - WORKER_CONCURRENCY=${WORKER_CONCURRENCY} - REMOVE_ON_AGE=${REMOVE_ON_AGE} - REMOVE_ON_COUNT=${REMOVE_ON_COUNT} - REDIS_URL=${REDIS_URL} - REDIS_HOST=${REDIS_HOST} - REDIS_PORT=${REDIS_PORT} - - REDIS_PASSWORD=${REDIS_PASSWORD} - REDIS_USERNAME=${REDIS_USERNAME} + - REDIS_PASSWORD=${REDIS_PASSWORD} - REDIS_TLS=${REDIS_TLS} - 
REDIS_CERT=${REDIS_CERT} - REDIS_KEY=${REDIS_KEY} - REDIS_CA=${REDIS_CA} - REDIS_KEEP_ALIVE=${REDIS_KEEP_ALIVE} - ENABLE_BULLMQ_DASHBOARD=${ENABLE_BULLMQ_DASHBOARD} + + # SECURITY + - CUSTOM_MCP_SECURITY_CHECK=${CUSTOM_MCP_SECURITY_CHECK} + - CUSTOM_MCP_PROTOCOL=${CUSTOM_MCP_PROTOCOL} + - HTTP_DENY_LIST=${HTTP_DENY_LIST} + - TRUST_PROXY=${TRUST_PROXY} ports: - '${PORT}:${PORT}' healthcheck: diff --git a/docker/worker/.env.example b/docker/worker/.env.example new file mode 100644 index 000000000..0e4b0c0dc --- /dev/null +++ b/docker/worker/.env.example @@ -0,0 +1,180 @@ +WORKER_PORT=5566 + +# APIKEY_PATH=/your_apikey_path/.flowise # (will be deprecated by end of 2025) + +############################################################################################################ +############################################## DATABASE #################################################### +############################################################################################################ + +DATABASE_PATH=/root/.flowise +# DATABASE_TYPE=postgres +# DATABASE_PORT=5432 +# DATABASE_HOST="" +# DATABASE_NAME=flowise +# DATABASE_USER=root +# DATABASE_PASSWORD=mypassword +# DATABASE_SSL=true +# DATABASE_REJECT_UNAUTHORIZED=true +# DATABASE_SSL_KEY_BASE64= + + +############################################################################################################ +############################################## SECRET KEYS ################################################# +############################################################################################################ + +# SECRETKEY_STORAGE_TYPE=local #(local | aws) +SECRETKEY_PATH=/root/.flowise +# FLOWISE_SECRETKEY_OVERWRITE=myencryptionkey # (if you want to overwrite the secret key) +# SECRETKEY_AWS_ACCESS_KEY= +# SECRETKEY_AWS_SECRET_KEY= +# SECRETKEY_AWS_REGION=us-west-2 +# SECRETKEY_AWS_NAME=FlowiseEncryptionKey + + 
+############################################################################################################ +############################################## LOGGING ##################################################### +############################################################################################################ + +# DEBUG=true +LOG_PATH=/root/.flowise/logs +# LOG_LEVEL=info #(error | warn | info | verbose | debug) +# LOG_SANITIZE_BODY_FIELDS=password,pwd,pass,secret,token,apikey,api_key,accesstoken,access_token,refreshtoken,refresh_token,clientsecret,client_secret,privatekey,private_key,secretkey,secret_key,auth,authorization,credential,credentials +# LOG_SANITIZE_HEADER_FIELDS=authorization,x-api-key,x-auth-token,cookie +# TOOL_FUNCTION_BUILTIN_DEP=crypto,fs +# TOOL_FUNCTION_EXTERNAL_DEP=moment,lodash +# ALLOW_BUILTIN_DEP=false + + +############################################################################################################ +############################################## STORAGE ##################################################### +############################################################################################################ + +# STORAGE_TYPE=local (local | s3 | gcs) +BLOB_STORAGE_PATH=/root/.flowise/storage +# S3_STORAGE_BUCKET_NAME=flowise +# S3_STORAGE_ACCESS_KEY_ID= +# S3_STORAGE_SECRET_ACCESS_KEY= +# S3_STORAGE_REGION=us-west-2 +# S3_ENDPOINT_URL= +# S3_FORCE_PATH_STYLE=false +# GOOGLE_CLOUD_STORAGE_CREDENTIAL=/the/keyfilename/path +# GOOGLE_CLOUD_STORAGE_PROJ_ID= +# GOOGLE_CLOUD_STORAGE_BUCKET_NAME= +# GOOGLE_CLOUD_UNIFORM_BUCKET_ACCESS=true + + +############################################################################################################ +############################################## SETTINGS #################################################### +############################################################################################################ + +# NUMBER_OF_PROXIES= 1 +# 
CORS_ORIGINS=* +# IFRAME_ORIGINS=* +# FLOWISE_FILE_SIZE_LIMIT=50mb +# SHOW_COMMUNITY_NODES=true +# DISABLE_FLOWISE_TELEMETRY=true +# DISABLED_NODES=bufferMemory,chatOpenAI (comma separated list of node names to disable) +# Uncomment the following line to enable model list config, load the list of models from your local config file +# see https://raw.githubusercontent.com/FlowiseAI/Flowise/main/packages/components/models.json for the format +# MODEL_LIST_CONFIG_JSON=/your_model_list_config_file_path + + +############################################################################################################ +############################################ AUTH PARAMETERS ############################################### +############################################################################################################ + +# APP_URL=http://localhost:3000 + +# SMTP_HOST=smtp.host.com +# SMTP_PORT=465 +# SMTP_USER=smtp_user +# SMTP_PASSWORD=smtp_password +# SMTP_SECURE=true +# ALLOW_UNAUTHORIZED_CERTS=false +# SENDER_EMAIL=team@example.com + +JWT_AUTH_TOKEN_SECRET='AABBCCDDAABBCCDDAABBCCDDAABBCCDDAABBCCDD' +JWT_REFRESH_TOKEN_SECRET='AABBCCDDAABBCCDDAABBCCDDAABBCCDDAABBCCDD' +JWT_ISSUER='ISSUER' +JWT_AUDIENCE='AUDIENCE' +JWT_TOKEN_EXPIRY_IN_MINUTES=360 +JWT_REFRESH_TOKEN_EXPIRY_IN_MINUTES=43200 +# EXPIRE_AUTH_TOKENS_ON_RESTART=true # (if you need to expire all tokens on app restart) +# EXPRESS_SESSION_SECRET=flowise +# SECURE_COOKIES= + +# INVITE_TOKEN_EXPIRY_IN_HOURS=24 +# PASSWORD_RESET_TOKEN_EXPIRY_IN_MINS=15 +# PASSWORD_SALT_HASH_ROUNDS=10 +# TOKEN_HASH_SECRET='popcorn' + +# WORKSPACE_INVITE_TEMPLATE_PATH=/path/to/custom/workspace_invite.hbs + + +############################################################################################################ +############################################# ENTERPRISE ################################################### 
+############################################################################################################ + +# LICENSE_URL= +# FLOWISE_EE_LICENSE_KEY= +# OFFLINE= + + +############################################################################################################ +########################################### METRICS COLLECTION ############################################# +############################################################################################################ + +# POSTHOG_PUBLIC_API_KEY=your_posthog_public_api_key + +# ENABLE_METRICS=false +# METRICS_PROVIDER=prometheus # prometheus | open_telemetry +# METRICS_INCLUDE_NODE_METRICS=true # default is true +# METRICS_SERVICE_NAME=FlowiseAI + +# ONLY NEEDED if METRICS_PROVIDER=open_telemetry +# METRICS_OPEN_TELEMETRY_METRIC_ENDPOINT=http://localhost:4318/v1/metrics +# METRICS_OPEN_TELEMETRY_PROTOCOL=http # http | grpc | proto (default is http) +# METRICS_OPEN_TELEMETRY_DEBUG=true # default is false + + +############################################################################################################ +############################################### PROXY ###################################################### +############################################################################################################ + +# Uncomment the following lines to enable global agent proxy, see https://www.npmjs.com/package/global-agent for more details +# GLOBAL_AGENT_HTTP_PROXY=CorporateHttpProxyUrl +# GLOBAL_AGENT_HTTPS_PROXY=CorporateHttpsProxyUrl +# GLOBAL_AGENT_NO_PROXY=ExceptionHostsToBypassProxyIfNeeded + + +############################################################################################################ +########################################### QUEUE CONFIGURATION ############################################ +############################################################################################################ + +# MODE=queue #(queue | main) +# 
QUEUE_NAME=flowise-queue +# QUEUE_REDIS_EVENT_STREAM_MAX_LEN=100000 +# WORKER_CONCURRENCY=100000 +# REMOVE_ON_AGE=86400 +# REMOVE_ON_COUNT=10000 +# REDIS_URL= +# REDIS_HOST=localhost +# REDIS_PORT=6379 +# REDIS_USERNAME= +# REDIS_PASSWORD= +# REDIS_TLS= +# REDIS_CERT= +# REDIS_KEY= +# REDIS_CA= +# REDIS_KEEP_ALIVE= +# ENABLE_BULLMQ_DASHBOARD= + + +############################################################################################################ +############################################## SECURITY #################################################### +############################################################################################################ + +# HTTP_DENY_LIST= +# CUSTOM_MCP_SECURITY_CHECK=true +# CUSTOM_MCP_PROTOCOL=sse #(stdio | sse) +# TRUST_PROXY=true #(true | false | 1 | loopback| linklocal | uniquelocal | IP addresses | loopback, IP addresses) diff --git a/docker/worker/Dockerfile b/docker/worker/Dockerfile new file mode 100644 index 000000000..8a2c749d4 --- /dev/null +++ b/docker/worker/Dockerfile @@ -0,0 +1,49 @@ +FROM node:20-alpine + +RUN apk add --update libc6-compat python3 make g++ +# needed for pdfjs-dist +RUN apk add --no-cache build-base cairo-dev pango-dev + +# Install Chromium and curl for container-level health checks +RUN apk add --no-cache chromium curl + +#install PNPM globally +RUN npm install -g pnpm + +ENV PUPPETEER_SKIP_DOWNLOAD=true +ENV PUPPETEER_EXECUTABLE_PATH=/usr/bin/chromium-browser + +ENV NODE_OPTIONS=--max-old-space-size=8192 + +WORKDIR /usr/src + +# Copy app source +COPY . . + +RUN pnpm install + +RUN pnpm build + +# --- Healthcheck Setup --- + +WORKDIR /app/healthcheck + +COPY docker/worker/healthcheck/package.json . + +RUN npm install --omit=dev + +COPY docker/worker/healthcheck/healthcheck.js . 
+ +# --- End Healthcheck Setup --- + +# Set the main working directory back +WORKDIR /usr/src + +# Environment variables for port configuration +ENV WORKER_PORT=5566 + +# Expose port (can be overridden by env var) +EXPOSE ${WORKER_PORT} + +# Start healthcheck in background and flowise worker in foreground +CMD ["/bin/sh", "-c", "node /app/healthcheck/healthcheck.js & sleep 5 && pnpm run start-worker"] diff --git a/docker/worker/README.md b/docker/worker/README.md index 82769c1e2..b2299cd03 100644 --- a/docker/worker/README.md +++ b/docker/worker/README.md @@ -18,7 +18,11 @@ Hereโ€™s an overview of the process: ## Setting up Worker: -1. Copy paste the same `.env` file used to setup main server. Change the `PORT` to other available port numbers. Ex: 5566 -2. `docker compose up -d` -3. Open [http://localhost:5566](http://localhost:5566) +1. Navigate to the `docker/worker` folder +2. In the `.env.example`, set up all the necessary env variables for `QUEUE CONFIGURATION`. Env variables for the worker must match the ones for the main server. Change the `WORKER_PORT` to other available port numbers to listen for healthcheck. Ex: 5566 +3. `docker compose up -d` 4. You can bring the worker container down by `docker compose stop` + +## Entrypoint: + +Different from the main server image, which uses `flowise start`, the entrypoint for the worker is `pnpm run start-worker`. This is because the worker's [Dockerfile](./Dockerfile) builds the image from source files via `pnpm build` instead of from the npm registry via `RUN npm install -g flowise`. 
diff --git a/docker/worker/docker-compose.yml b/docker/worker/docker-compose.yml index 193d9cd0d..da9e05792 100644 --- a/docker/worker/docker-compose.yml +++ b/docker/worker/docker-compose.yml @@ -2,16 +2,12 @@ version: '3.1' services: flowise: - image: flowiseai/flowise + image: flowiseai/flowise-worker:latest restart: always environment: - - PORT=${PORT} - - CORS_ORIGINS=${CORS_ORIGINS} - - IFRAME_ORIGINS=${IFRAME_ORIGINS} - - FLOWISE_USERNAME=${FLOWISE_USERNAME} - - FLOWISE_PASSWORD=${FLOWISE_PASSWORD} - - FLOWISE_FILE_SIZE_LIMIT=${FLOWISE_FILE_SIZE_LIMIT} - - DEBUG=${DEBUG} + - WORKER_PORT=${WORKER_PORT:-5566} + + # DATABASE - DATABASE_PATH=${DATABASE_PATH} - DATABASE_TYPE=${DATABASE_TYPE} - DATABASE_PORT=${DATABASE_PORT} @@ -21,37 +17,130 @@ services: - DATABASE_PASSWORD=${DATABASE_PASSWORD} - DATABASE_SSL=${DATABASE_SSL} - DATABASE_SSL_KEY_BASE64=${DATABASE_SSL_KEY_BASE64} - - APIKEY_STORAGE_TYPE=${APIKEY_STORAGE_TYPE} - - APIKEY_PATH=${APIKEY_PATH} + + # SECRET KEYS + - SECRETKEY_STORAGE_TYPE=${SECRETKEY_STORAGE_TYPE} - SECRETKEY_PATH=${SECRETKEY_PATH} - FLOWISE_SECRETKEY_OVERWRITE=${FLOWISE_SECRETKEY_OVERWRITE} - - LOG_LEVEL=${LOG_LEVEL} + - SECRETKEY_AWS_ACCESS_KEY=${SECRETKEY_AWS_ACCESS_KEY} + - SECRETKEY_AWS_SECRET_KEY=${SECRETKEY_AWS_SECRET_KEY} + - SECRETKEY_AWS_REGION=${SECRETKEY_AWS_REGION} + - SECRETKEY_AWS_NAME=${SECRETKEY_AWS_NAME} + + # LOGGING + - DEBUG=${DEBUG} - LOG_PATH=${LOG_PATH} + - LOG_LEVEL=${LOG_LEVEL} + - LOG_SANITIZE_BODY_FIELDS=${LOG_SANITIZE_BODY_FIELDS} + - LOG_SANITIZE_HEADER_FIELDS=${LOG_SANITIZE_HEADER_FIELDS} + + # CUSTOM TOOL/FUNCTION DEPENDENCIES + - TOOL_FUNCTION_BUILTIN_DEP=${TOOL_FUNCTION_BUILTIN_DEP} + - TOOL_FUNCTION_EXTERNAL_DEP=${TOOL_FUNCTION_EXTERNAL_DEP} + - ALLOW_BUILTIN_DEP=${ALLOW_BUILTIN_DEP} + + # STORAGE + - STORAGE_TYPE=${STORAGE_TYPE} - BLOB_STORAGE_PATH=${BLOB_STORAGE_PATH} + - S3_STORAGE_BUCKET_NAME=${S3_STORAGE_BUCKET_NAME} + - S3_STORAGE_ACCESS_KEY_ID=${S3_STORAGE_ACCESS_KEY_ID} + - 
S3_STORAGE_SECRET_ACCESS_KEY=${S3_STORAGE_SECRET_ACCESS_KEY} + - S3_STORAGE_REGION=${S3_STORAGE_REGION} + - S3_ENDPOINT_URL=${S3_ENDPOINT_URL} + - S3_FORCE_PATH_STYLE=${S3_FORCE_PATH_STYLE} + - GOOGLE_CLOUD_STORAGE_CREDENTIAL=${GOOGLE_CLOUD_STORAGE_CREDENTIAL} + - GOOGLE_CLOUD_STORAGE_PROJ_ID=${GOOGLE_CLOUD_STORAGE_PROJ_ID} + - GOOGLE_CLOUD_STORAGE_BUCKET_NAME=${GOOGLE_CLOUD_STORAGE_BUCKET_NAME} + - GOOGLE_CLOUD_UNIFORM_BUCKET_ACCESS=${GOOGLE_CLOUD_UNIFORM_BUCKET_ACCESS} + + # SETTINGS + - NUMBER_OF_PROXIES=${NUMBER_OF_PROXIES} + - CORS_ORIGINS=${CORS_ORIGINS} + - IFRAME_ORIGINS=${IFRAME_ORIGINS} + - FLOWISE_FILE_SIZE_LIMIT=${FLOWISE_FILE_SIZE_LIMIT} + - SHOW_COMMUNITY_NODES=${SHOW_COMMUNITY_NODES} + - DISABLE_FLOWISE_TELEMETRY=${DISABLE_FLOWISE_TELEMETRY} + - DISABLED_NODES=${DISABLED_NODES} - MODEL_LIST_CONFIG_JSON=${MODEL_LIST_CONFIG_JSON} + + # AUTH PARAMETERS + - APP_URL=${APP_URL} + - JWT_AUTH_TOKEN_SECRET=${JWT_AUTH_TOKEN_SECRET} + - JWT_REFRESH_TOKEN_SECRET=${JWT_REFRESH_TOKEN_SECRET} + - JWT_ISSUER=${JWT_ISSUER} + - JWT_AUDIENCE=${JWT_AUDIENCE} + - JWT_TOKEN_EXPIRY_IN_MINUTES=${JWT_TOKEN_EXPIRY_IN_MINUTES} + - JWT_REFRESH_TOKEN_EXPIRY_IN_MINUTES=${JWT_REFRESH_TOKEN_EXPIRY_IN_MINUTES} + - EXPIRE_AUTH_TOKENS_ON_RESTART=${EXPIRE_AUTH_TOKENS_ON_RESTART} + - EXPRESS_SESSION_SECRET=${EXPRESS_SESSION_SECRET} + - PASSWORD_RESET_TOKEN_EXPIRY_IN_MINS=${PASSWORD_RESET_TOKEN_EXPIRY_IN_MINS} + - PASSWORD_SALT_HASH_ROUNDS=${PASSWORD_SALT_HASH_ROUNDS} + - TOKEN_HASH_SECRET=${TOKEN_HASH_SECRET} + - SECURE_COOKIES=${SECURE_COOKIES} + + # EMAIL + - SMTP_HOST=${SMTP_HOST} + - SMTP_PORT=${SMTP_PORT} + - SMTP_USER=${SMTP_USER} + - SMTP_PASSWORD=${SMTP_PASSWORD} + - SMTP_SECURE=${SMTP_SECURE} + - ALLOW_UNAUTHORIZED_CERTS=${ALLOW_UNAUTHORIZED_CERTS} + - SENDER_EMAIL=${SENDER_EMAIL} + + # ENTERPRISE + - LICENSE_URL=${LICENSE_URL} + - FLOWISE_EE_LICENSE_KEY=${FLOWISE_EE_LICENSE_KEY} + - OFFLINE=${OFFLINE} + - INVITE_TOKEN_EXPIRY_IN_HOURS=${INVITE_TOKEN_EXPIRY_IN_HOURS} + - 
WORKSPACE_INVITE_TEMPLATE_PATH=${WORKSPACE_INVITE_TEMPLATE_PATH} + + # METRICS COLLECTION + - POSTHOG_PUBLIC_API_KEY=${POSTHOG_PUBLIC_API_KEY} + - ENABLE_METRICS=${ENABLE_METRICS} + - METRICS_PROVIDER=${METRICS_PROVIDER} + - METRICS_INCLUDE_NODE_METRICS=${METRICS_INCLUDE_NODE_METRICS} + - METRICS_SERVICE_NAME=${METRICS_SERVICE_NAME} + - METRICS_OPEN_TELEMETRY_METRIC_ENDPOINT=${METRICS_OPEN_TELEMETRY_METRIC_ENDPOINT} + - METRICS_OPEN_TELEMETRY_PROTOCOL=${METRICS_OPEN_TELEMETRY_PROTOCOL} + - METRICS_OPEN_TELEMETRY_DEBUG=${METRICS_OPEN_TELEMETRY_DEBUG} + + # PROXY - GLOBAL_AGENT_HTTP_PROXY=${GLOBAL_AGENT_HTTP_PROXY} - GLOBAL_AGENT_HTTPS_PROXY=${GLOBAL_AGENT_HTTPS_PROXY} - GLOBAL_AGENT_NO_PROXY=${GLOBAL_AGENT_NO_PROXY} - - DISABLED_NODES=${DISABLED_NODES} + + # QUEUE CONFIGURATION - MODE=${MODE} - - WORKER_CONCURRENCY=${WORKER_CONCURRENCY} - QUEUE_NAME=${QUEUE_NAME} - QUEUE_REDIS_EVENT_STREAM_MAX_LEN=${QUEUE_REDIS_EVENT_STREAM_MAX_LEN} + - WORKER_CONCURRENCY=${WORKER_CONCURRENCY} - REMOVE_ON_AGE=${REMOVE_ON_AGE} - REMOVE_ON_COUNT=${REMOVE_ON_COUNT} - REDIS_URL=${REDIS_URL} - REDIS_HOST=${REDIS_HOST} - REDIS_PORT=${REDIS_PORT} - - REDIS_PASSWORD=${REDIS_PASSWORD} - REDIS_USERNAME=${REDIS_USERNAME} + - REDIS_PASSWORD=${REDIS_PASSWORD} - REDIS_TLS=${REDIS_TLS} - REDIS_CERT=${REDIS_CERT} - REDIS_KEY=${REDIS_KEY} - REDIS_CA=${REDIS_CA} - REDIS_KEEP_ALIVE=${REDIS_KEEP_ALIVE} - ENABLE_BULLMQ_DASHBOARD=${ENABLE_BULLMQ_DASHBOARD} + + # SECURITY + - CUSTOM_MCP_SECURITY_CHECK=${CUSTOM_MCP_SECURITY_CHECK} + - CUSTOM_MCP_PROTOCOL=${CUSTOM_MCP_PROTOCOL} + - HTTP_DENY_LIST=${HTTP_DENY_LIST} + - TRUST_PROXY=${TRUST_PROXY} ports: - - '${PORT}:${PORT}' + - '${WORKER_PORT}:${WORKER_PORT}' + healthcheck: + test: ['CMD', 'curl', '-f', 'http://localhost:${WORKER_PORT}/healthz'] + interval: 10s + timeout: 5s + retries: 5 + start_period: 30s volumes: - ~/.flowise:/root/.flowise - entrypoint: /bin/sh -c "sleep 3; flowise worker" + entrypoint: /bin/sh -c "node /app/healthcheck/healthcheck.js & 
sleep 5 && pnpm run start-worker" diff --git a/docker/worker/healthcheck/healthcheck.js b/docker/worker/healthcheck/healthcheck.js new file mode 100644 index 000000000..fcc204f7d --- /dev/null +++ b/docker/worker/healthcheck/healthcheck.js @@ -0,0 +1,13 @@ +const express = require('express') +const app = express() + +const port = process.env.WORKER_PORT || 5566 + +app.get('/healthz', (req, res) => { + res.status(200).send('OK') +}) + +app.listen(port, () => { + // eslint-disable-next-line no-console + console.log(`Healthcheck server listening on port ${port}`) +}) diff --git a/docker/worker/healthcheck/package.json b/docker/worker/healthcheck/package.json new file mode 100644 index 000000000..aa7bfd6be --- /dev/null +++ b/docker/worker/healthcheck/package.json @@ -0,0 +1,13 @@ +{ + "name": "flowise-worker-healthcheck", + "version": "1.0.0", + "description": "Simple healthcheck server for Flowise worker", + "main": "healthcheck.js", + "private": true, + "scripts": { + "start": "node healthcheck.js" + }, + "dependencies": { + "express": "^4.19.2" + } +} diff --git a/i18n/CONTRIBUTING-ZH.md b/i18n/CONTRIBUTING-ZH.md index 45626785e..d6b019892 100644 --- a/i18n/CONTRIBUTING-ZH.md +++ b/i18n/CONTRIBUTING-ZH.md @@ -112,45 +112,41 @@ Flowise ๅœจไธ€ไธชๅ•ไธ€็š„ๅ•ไฝ“ๅญ˜ๅ‚จๅบ“ไธญๆœ‰ 3 ไธชไธๅŒ็š„ๆจกๅ—ใ€‚ pnpm start ``` -11. ๆไบคไปฃ็ ๅนถไปŽๆŒ‡ๅ‘ [Flowise ไธปๅˆ†ๆ”ฏ](https://github.com/FlowiseAI/Flowise/tree/master) ็š„ๅˆ†ๅ‰ๅˆ†ๆ”ฏไธŠๆไบค Pull Requestใ€‚ +11. 
ๆไบคไปฃ็ ๅนถไปŽๆŒ‡ๅ‘ [Flowise ไธปๅˆ†ๆ”ฏ](https://github.com/FlowiseAI/Flowise/tree/main) ็š„ๅˆ†ๅ‰ๅˆ†ๆ”ฏไธŠๆไบค Pull Requestใ€‚ ## ๐ŸŒฑ ็Žฏๅขƒๅ˜้‡ Flowise ๆ”ฏๆŒไธๅŒ็š„็Žฏๅขƒๅ˜้‡ๆฅ้…็ฝฎๆ‚จ็š„ๅฎžไพ‹ใ€‚ๆ‚จๅฏไปฅๅœจ `packages/server` ๆ–‡ไปถๅคนไธญ็š„ `.env` ๆ–‡ไปถไธญๆŒ‡ๅฎšไปฅไธ‹ๅ˜้‡ใ€‚้˜…่ฏป[ๆ›ดๅคšไฟกๆฏ](https://docs.flowiseai.com/environment-variables) -| ๅ˜้‡ๅ | ๆ่ฟฐ | ็ฑปๅž‹ | ้ป˜่ฎคๅ€ผ | -| ---------------------------- | ------------------------------------------------------- | ----------------------------------------------- | ----------------------------------- | --- | -| PORT | Flowise ่ฟ่กŒ็š„ HTTP ็ซฏๅฃ | ๆ•ฐๅญ— | 3000 | -| FLOWISE_USERNAME | ็™ปๅฝ•็”จๆˆทๅ | ๅญ—็ฌฆไธฒ | | -| FLOWISE_PASSWORD | ็™ปๅฝ•ๅฏ†็  | ๅญ—็ฌฆไธฒ | | -| FLOWISE_FILE_SIZE_LIMIT | ไธŠไผ ๆ–‡ไปถๅคงๅฐ้™ๅˆถ | ๅญ—็ฌฆไธฒ | 50mb | | -| DEBUG | ๆ‰“ๅฐ็ป„ไปถ็š„ๆ—ฅๅฟ— | ๅธƒๅฐ”ๅ€ผ | | -| LOG_PATH | ๅญ˜ๅ‚จๆ—ฅๅฟ—ๆ–‡ไปถ็š„ไฝ็ฝฎ | ๅญ—็ฌฆไธฒ | `your-path/Flowise/logs` | -| LOG_LEVEL | ๆ—ฅๅฟ—็š„ไธๅŒ็บงๅˆซ | ๆžšไธพๅญ—็ฌฆไธฒ: `error`, `info`, `verbose`, `debug` | `info` | -| APIKEY_STORAGE_TYPE | ๅญ˜ๅ‚จ API ๅฏ†้’ฅ็š„ๅญ˜ๅ‚จ็ฑปๅž‹ | ๆžšไธพๅญ—็ฌฆไธฒ: `json`, `db` | `json` | -| APIKEY_PATH | ๅญ˜ๅ‚จ API ๅฏ†้’ฅ็š„ไฝ็ฝฎ, ๅฝ“`APIKEY_STORAGE_TYPE`ๆ˜ฏ`json` | ๅญ—็ฌฆไธฒ | `your-path/Flowise/packages/server` | -| TOOL_FUNCTION_BUILTIN_DEP | ็”จไบŽๅทฅๅ…ทๅ‡ฝๆ•ฐ็š„ NodeJS ๅ†…็ฝฎๆจกๅ— | ๅญ—็ฌฆไธฒ | | -| TOOL_FUNCTION_EXTERNAL_DEP | ็”จไบŽๅทฅๅ…ทๅ‡ฝๆ•ฐ็š„ๅค–้ƒจๆจกๅ— | ๅญ—็ฌฆไธฒ | | -| DATABASE_TYPE | ๅญ˜ๅ‚จ flowise ๆ•ฐๆฎ็š„ๆ•ฐๆฎๅบ“็ฑปๅž‹ | ๆžšไธพๅญ—็ฌฆไธฒ: `sqlite`, `mysql`, `postgres` | `sqlite` | -| DATABASE_PATH | ๆ•ฐๆฎๅบ“ไฟๅญ˜็š„ไฝ็ฝฎ๏ผˆๅฝ“ DATABASE_TYPE ๆ˜ฏ sqlite ๆ—ถ๏ผ‰ | ๅญ—็ฌฆไธฒ | `your-home-dir/.flowise` | -| DATABASE_HOST | ไธปๆœบ URL ๆˆ– IP ๅœฐๅ€๏ผˆๅฝ“ DATABASE_TYPE ไธๆ˜ฏ sqlite ๆ—ถ๏ผ‰ | ๅญ—็ฌฆไธฒ | | -| DATABASE_PORT | ๆ•ฐๆฎๅบ“็ซฏๅฃ๏ผˆๅฝ“ DATABASE_TYPE ไธๆ˜ฏ sqlite ๆ—ถ๏ผ‰ | ๅญ—็ฌฆไธฒ | | -| DATABASE_USERNAME | ๆ•ฐๆฎๅบ“็”จๆˆทๅ๏ผˆๅฝ“ DATABASE_TYPE ไธๆ˜ฏ sqlite ๆ—ถ๏ผ‰ | 
ๅญ—็ฌฆไธฒ | | -| DATABASE_PASSWORD | ๆ•ฐๆฎๅบ“ๅฏ†็ ๏ผˆๅฝ“ DATABASE_TYPE ไธๆ˜ฏ sqlite ๆ—ถ๏ผ‰ | ๅญ—็ฌฆไธฒ | | -| DATABASE_NAME | ๆ•ฐๆฎๅบ“ๅ็งฐ๏ผˆๅฝ“ DATABASE_TYPE ไธๆ˜ฏ sqlite ๆ—ถ๏ผ‰ | ๅญ—็ฌฆไธฒ | | -| SECRETKEY_PATH | ไฟๅญ˜ๅŠ ๅฏ†ๅฏ†้’ฅ๏ผˆ็”จไบŽๅŠ ๅฏ†/่งฃๅฏ†ๅ‡ญๆฎ๏ผ‰็š„ไฝ็ฝฎ | ๅญ—็ฌฆไธฒ | `your-path/Flowise/packages/server` | -| FLOWISE_SECRETKEY_OVERWRITE | ๅŠ ๅฏ†ๅฏ†้’ฅ็”จไบŽๆ›ฟไปฃๅญ˜ๅ‚จๅœจ SECRETKEY_PATH ไธญ็š„ๅฏ†้’ฅ | ๅญ—็ฌฆไธฒ | -| MODEL_LIST_CONFIG_JSON | ๅŠ ่ฝฝๆจกๅž‹็š„ไฝ็ฝฎ | ๅญ—็ฌฆ | `/your_model_list_config_file_path` | -| STORAGE_TYPE | ไธŠไผ ๆ–‡ไปถ็š„ๅญ˜ๅ‚จ็ฑปๅž‹ | ๆžšไธพๅญ—็ฌฆไธฒ: `local`, `s3` | `local` | -| BLOB_STORAGE_PATH | ไธŠไผ ๆ–‡ไปถๅญ˜ๅ‚จ็š„ๆœฌๅœฐๆ–‡ไปถๅคน่ทฏๅพ„, ๅฝ“`STORAGE_TYPE`ๆ˜ฏ`local` | ๅญ—็ฌฆไธฒ | `your-home-dir/.flowise/storage` | -| S3_STORAGE_BUCKET_NAME | S3 ๅญ˜ๅ‚จๆ–‡ไปถๅคน่ทฏๅพ„, ๅฝ“`STORAGE_TYPE`ๆ˜ฏ`s3` | ๅญ—็ฌฆไธฒ | | -| S3_STORAGE_ACCESS_KEY_ID | AWS ่ฎฟ้—ฎๅฏ†้’ฅ (Access Key) | ๅญ—็ฌฆไธฒ | | -| S3_STORAGE_SECRET_ACCESS_KEY | AWS ๅฏ†้’ฅ (Secret Key) | ๅญ—็ฌฆไธฒ | | -| S3_STORAGE_REGION | S3 ๅญ˜ๅ‚จๅœฐๅŒบ | ๅญ—็ฌฆไธฒ | | -| S3_ENDPOINT_URL | S3 ็ซฏ็‚น URL | ๅญ—็ฌฆไธฒ | | -| S3_FORCE_PATH_STYLE | ๅฐ†ๅ…ถ่ฎพ็ฝฎไธบ true ไปฅๅผบๅˆถ่ฏทๆฑ‚ไฝฟ็”จ่ทฏๅพ„ๆ ทๅผๅฏปๅ€ | ๅธƒๅฐ”ๅ€ผ | false | -| SHOW_COMMUNITY_NODES | ๆ˜พ็คบ็”ฑ็คพๅŒบๅˆ›ๅปบ็š„่Š‚็‚น | ๅธƒๅฐ”ๅ€ผ | | -| DISABLED_NODES | ไปŽ็•Œ้ขไธญ้š่—่Š‚็‚น๏ผˆไปฅ้€—ๅทๅˆ†้š”็š„่Š‚็‚นๅ็งฐๅˆ—่กจ๏ผ‰ | ๅญ—็ฌฆไธฒ | | +| ๅ˜้‡ๅ | ๆ่ฟฐ | ็ฑปๅž‹ | ้ป˜่ฎคๅ€ผ | +| ------------------------------ | -------------------------------------------------------- | ----------------------------------------------- | ----------------------------------- | +| `PORT` | Flowise ่ฟ่กŒ็š„ HTTP ็ซฏๅฃ | ๆ•ฐๅญ— | 3000 | +| `FLOWISE_FILE_SIZE_LIMIT` | ไธŠไผ ๆ–‡ไปถๅคงๅฐ้™ๅˆถ | ๅญ—็ฌฆไธฒ | 50mb | +| `DEBUG` | ๆ‰“ๅฐ็ป„ไปถ็š„ๆ—ฅๅฟ— | ๅธƒๅฐ”ๅ€ผ | | +| `LOG_PATH` | ๅญ˜ๅ‚จๆ—ฅๅฟ—ๆ–‡ไปถ็š„ไฝ็ฝฎ | ๅญ—็ฌฆไธฒ | `your-path/Flowise/logs` | +| `LOG_LEVEL` | ๆ—ฅๅฟ—็š„ไธๅŒ็บงๅˆซ | ๆžšไธพๅญ—็ฌฆไธฒ: `error`, `info`, `verbose`, 
`debug` | `info` | +| `TOOL_FUNCTION_BUILTIN_DEP` | ็”จไบŽๅทฅๅ…ทๅ‡ฝๆ•ฐ็š„ NodeJS ๅ†…็ฝฎๆจกๅ— | ๅญ—็ฌฆไธฒ | | +| `TOOL_FUNCTION_EXTERNAL_DEP` | ็”จไบŽๅทฅๅ…ทๅ‡ฝๆ•ฐ็š„ๅค–้ƒจๆจกๅ— | ๅญ—็ฌฆไธฒ | | +| `DATABASE_TYPE` | ๅญ˜ๅ‚จ Flowise ๆ•ฐๆฎ็š„ๆ•ฐๆฎๅบ“็ฑปๅž‹ | ๆžšไธพๅญ—็ฌฆไธฒ: `sqlite`, `mysql`, `postgres` | `sqlite` | +| `DATABASE_PATH` | ๆ•ฐๆฎๅบ“ไฟๅญ˜็š„ไฝ็ฝฎ๏ผˆๅฝ“ `DATABASE_TYPE` ๆ˜ฏ sqlite ๆ—ถ๏ผ‰ | ๅญ—็ฌฆไธฒ | `your-home-dir/.flowise` | +| `DATABASE_HOST` | ไธปๆœบ URL ๆˆ– IP ๅœฐๅ€๏ผˆๅฝ“ `DATABASE_TYPE` ไธๆ˜ฏ sqlite ๆ—ถ๏ผ‰ | ๅญ—็ฌฆไธฒ | | +| `DATABASE_PORT` | ๆ•ฐๆฎๅบ“็ซฏๅฃ๏ผˆๅฝ“ `DATABASE_TYPE` ไธๆ˜ฏ sqlite ๆ—ถ๏ผ‰ | ๅญ—็ฌฆไธฒ | | +| `DATABASE_USERNAME` | ๆ•ฐๆฎๅบ“็”จๆˆทๅ๏ผˆๅฝ“ `DATABASE_TYPE` ไธๆ˜ฏ sqlite ๆ—ถ๏ผ‰ | ๅญ—็ฌฆไธฒ | | +| `DATABASE_PASSWORD` | ๆ•ฐๆฎๅบ“ๅฏ†็ ๏ผˆๅฝ“ `DATABASE_TYPE` ไธๆ˜ฏ sqlite ๆ—ถ๏ผ‰ | ๅญ—็ฌฆไธฒ | | +| `DATABASE_NAME` | ๆ•ฐๆฎๅบ“ๅ็งฐ๏ผˆๅฝ“ `DATABASE_TYPE` ไธๆ˜ฏ sqlite ๆ—ถ๏ผ‰ | ๅญ—็ฌฆไธฒ | | +| `SECRETKEY_PATH` | ไฟๅญ˜ๅŠ ๅฏ†ๅฏ†้’ฅ๏ผˆ็”จไบŽๅŠ ๅฏ†/่งฃๅฏ†ๅ‡ญๆฎ๏ผ‰็š„ไฝ็ฝฎ | ๅญ—็ฌฆไธฒ | `your-path/Flowise/packages/server` | +| `FLOWISE_SECRETKEY_OVERWRITE` | ๅŠ ๅฏ†ๅฏ†้’ฅ็”จไบŽๆ›ฟไปฃๅญ˜ๅ‚จๅœจ `SECRETKEY_PATH` ไธญ็š„ๅฏ†้’ฅ | ๅญ—็ฌฆไธฒ | | +| `MODEL_LIST_CONFIG_JSON` | ๅŠ ่ฝฝๆจกๅž‹็š„ไฝ็ฝฎ | ๅญ—็ฌฆไธฒ | `/your_model_list_config_file_path` | +| `STORAGE_TYPE` | ไธŠไผ ๆ–‡ไปถ็š„ๅญ˜ๅ‚จ็ฑปๅž‹ | ๆžšไธพๅญ—็ฌฆไธฒ: `local`, `s3` | `local` | +| `BLOB_STORAGE_PATH` | ๆœฌๅœฐไธŠไผ ๆ–‡ไปถๅญ˜ๅ‚จ่ทฏๅพ„๏ผˆๅฝ“ `STORAGE_TYPE` ไธบ `local`๏ผ‰ | ๅญ—็ฌฆไธฒ | `your-home-dir/.flowise/storage` | +| `S3_STORAGE_BUCKET_NAME` | S3 ๅญ˜ๅ‚จๆ–‡ไปถๅคน่ทฏๅพ„๏ผˆๅฝ“ `STORAGE_TYPE` ไธบ `s3`๏ผ‰ | ๅญ—็ฌฆไธฒ | | +| `S3_STORAGE_ACCESS_KEY_ID` | AWS ่ฎฟ้—ฎๅฏ†้’ฅ (Access Key) | ๅญ—็ฌฆไธฒ | | +| `S3_STORAGE_SECRET_ACCESS_KEY` | AWS ๅฏ†้’ฅ (Secret Key) | ๅญ—็ฌฆไธฒ | | +| `S3_STORAGE_REGION` | S3 ๅญ˜ๅ‚จๅœฐๅŒบ | ๅญ—็ฌฆไธฒ | | +| `S3_ENDPOINT_URL` | S3 ็ซฏ็‚น URL | ๅญ—็ฌฆไธฒ | | +| `S3_FORCE_PATH_STYLE` | ่ฎพ็ฝฎไธบ true ไปฅๅผบๅˆถ่ฏทๆฑ‚ไฝฟ็”จ่ทฏๅพ„ๆ ทๅผๅฏปๅ€ | ๅธƒๅฐ”ๅ€ผ | false 
| +| `SHOW_COMMUNITY_NODES` | ๆ˜พ็คบ็”ฑ็คพๅŒบๅˆ›ๅปบ็š„่Š‚็‚น | ๅธƒๅฐ”ๅ€ผ | | +| `DISABLED_NODES` | ไปŽ็•Œ้ขไธญ้š่—่Š‚็‚น๏ผˆไปฅ้€—ๅทๅˆ†้š”็š„่Š‚็‚นๅ็งฐๅˆ—่กจ๏ผ‰ | ๅญ—็ฌฆไธฒ | | ๆ‚จไนŸๅฏไปฅๅœจไฝฟ็”จ `npx` ๆ—ถๆŒ‡ๅฎš็Žฏๅขƒๅ˜้‡ใ€‚ไพ‹ๅฆ‚๏ผš diff --git a/i18n/README-JA.md b/i18n/README-JA.md index a329059ed..0ea1ae386 100644 --- a/i18n/README-JA.md +++ b/i18n/README-JA.md @@ -31,12 +31,6 @@ npx flowise start ``` - ใƒฆใƒผใ‚ถใƒผๅใจใƒ‘ใ‚นใƒฏใƒผใƒ‰ใ‚’ๅ…ฅๅŠ› - - ```bash - npx flowise start --FLOWISE_USERNAME=user --FLOWISE_PASSWORD=1234 - ``` - 3. [http://localhost:3000](http://localhost:3000) ใ‚’้–‹ใ ## ๐Ÿณ Docker @@ -127,15 +121,6 @@ Flowise ใซใฏใ€3 ใคใฎ็•ฐใชใ‚‹ใƒขใ‚ธใƒฅใƒผใƒซใŒ 1 ใคใฎ mono ใƒชใƒใ‚ธใƒˆ ใ‚ณใƒผใƒ‰ใฎๅค‰ๆ›ดใฏ [http://localhost:8080](http://localhost:8080) ใซ่‡ชๅ‹•็š„ใซใ‚ขใƒ—ใƒชใ‚’ใƒชใƒญใƒผใƒ‰ใ—ใพใ™ -## ๐Ÿ”’ ่ช่จผ - -ใ‚ขใƒ—ใƒชใƒฌใƒ™ใƒซใฎ่ช่จผใ‚’ๆœ‰ๅŠนใซใ™ใ‚‹ใซใฏใ€ `FLOWISE_USERNAME` ใจ `FLOWISE_PASSWORD` ใ‚’ `packages/server` ใฎ `.env` ใƒ•ใ‚กใ‚คใƒซใซ่ฟฝๅŠ ใ—ใพใ™: - -``` -FLOWISE_USERNAME=user -FLOWISE_PASSWORD=1234 -``` - ## ๐ŸŒฑ ็’ฐๅขƒๅค‰ๆ•ฐ Flowise ใฏใ€ใ‚คใƒณใ‚นใ‚ฟใƒณใ‚นใ‚’่จญๅฎšใ™ใ‚‹ใŸใ‚ใฎใ•ใพใ–ใพใช็’ฐๅขƒๅค‰ๆ•ฐใ‚’ใ‚ตใƒใƒผใƒˆใ—ใฆใ„ใพใ™ใ€‚`packages/server` ใƒ•ใ‚ฉใƒซใƒ€ๅ†…ใฎ `.env` ใƒ•ใ‚กใ‚คใƒซใงไปฅไธ‹ใฎๅค‰ๆ•ฐใ‚’ๆŒ‡ๅฎšใ™ใ‚‹ใ“ใจใŒใงใใ‚‹ใ€‚[็ถšใ](https://github.com/FlowiseAI/Flowise/blob/main/CONTRIBUTING.md#-env-variables)ใ‚’่ชญใ‚€ @@ -197,9 +182,9 @@ Flowise ใฏใ€ใ‚คใƒณใ‚นใ‚ฟใƒณใ‚นใ‚’่จญๅฎšใ™ใ‚‹ใŸใ‚ใฎใ•ใพใ–ใพใช็’ฐๅขƒๅค‰ -[ใ‚ณใƒณใƒˆใƒชใƒ“ใƒฅใƒผใƒ†ใ‚ฃใƒณใ‚ฐใ‚ฌใ‚คใƒ‰](CONTRIBUTING.md)ใ‚’ๅ‚็…งใ—ใฆใใ ใ•ใ„ใ€‚่ณชๅ•ใ‚„ๅ•้กŒใŒใ‚ใ‚Œใฐใ€[Discord](https://discord.gg/jbaHfsRVBW) ใพใงใ”้€ฃ็ตกใใ ใ•ใ„ใ€‚ +[ใ‚ณใƒณใƒˆใƒชใƒ“ใƒฅใƒผใƒ†ใ‚ฃใƒณใ‚ฐใ‚ฌใ‚คใƒ‰](../CONTRIBUTING.md)ใ‚’ๅ‚็…งใ—ใฆใใ ใ•ใ„ใ€‚่ณชๅ•ใ‚„ๅ•้กŒใŒใ‚ใ‚Œใฐใ€[Discord](https://discord.gg/jbaHfsRVBW) ใพใงใ”้€ฃ็ตกใใ ใ•ใ„ใ€‚ [![Star History 
Chart](https://api.star-history.com/svg?repos=FlowiseAI/Flowise&type=Timeline)](https://star-history.com/#FlowiseAI/Flowise&Date) ## ๐Ÿ“„ ใƒฉใ‚คใ‚ปใƒณใ‚น -ใ“ใฎใƒชใƒใ‚ธใƒˆใƒชใฎใ‚ฝใƒผใ‚นใ‚ณใƒผใƒ‰ใฏใ€[Apache License Version 2.0](LICENSE.md)ใฎไธ‹ใงๅˆฉ็”จๅฏ่ƒฝใงใ™ใ€‚ +ใ“ใฎใƒชใƒใ‚ธใƒˆใƒชใฎใ‚ฝใƒผใ‚นใ‚ณใƒผใƒ‰ใฏใ€[Apache License Version 2.0](../LICENSE.md)ใฎไธ‹ใงๅˆฉ็”จๅฏ่ƒฝใงใ™ใ€‚ diff --git a/i18n/README-KR.md b/i18n/README-KR.md index c02b0b066..7caaa01a4 100644 --- a/i18n/README-KR.md +++ b/i18n/README-KR.md @@ -31,12 +31,6 @@ npx flowise start ``` - ์‚ฌ์šฉ์ž ์ด๋ฆ„๊ณผ ๋น„๋ฐ€๋ฒˆํ˜ธ๋กœ ์‹œ์ž‘ํ•˜๊ธฐ - - ```bash - npx flowise start --FLOWISE_USERNAME=user --FLOWISE_PASSWORD=1234 - ``` - 3. [http://localhost:3000](http://localhost:3000) URL ์—ด๊ธฐ ## ๐Ÿณ ๋„์ปค(Docker)๋ฅผ ํ™œ์šฉํ•˜์—ฌ ์‹œ์ž‘ํ•˜๊ธฐ @@ -127,15 +121,6 @@ Flowise๋Š” ๋‹จ์ผ ๋ฆฌํฌ์ง€ํ† ๋ฆฌ์— 3๊ฐœ์˜ ์„œ๋กœ ๋‹ค๋ฅธ ๋ชจ๋“ˆ์ด ์žˆ์Šต๋‹ˆ ์ฝ”๋“œ๊ฐ€ ๋ณ€๊ฒฝ๋˜๋ฉด [http://localhost:8080](http://localhost:8080)์—์„œ ์ž๋™์œผ๋กœ ์• ํ”Œ๋ฆฌ์ผ€์ด์…˜์„ ์ƒˆ๋กœ๊ณ ์นจ ํ•ฉ๋‹ˆ๋‹ค. -## ๐Ÿ”’ ์ธ์ฆ - -์• ํ”Œ๋ฆฌ์ผ€์ด์…˜ ์ˆ˜์ค€์˜ ์ธ์ฆ์„ ์‚ฌ์šฉํ•˜๋ ค๋ฉด `packages/server`์˜ `.env` ํŒŒ์ผ์— `FLOWISE_USERNAME` ๋ฐ `FLOWISE_PASSWORD`๋ฅผ ์ถ”๊ฐ€ํ•ฉ๋‹ˆ๋‹ค: - -``` -FLOWISE_USERNAME=user -FLOWISE_PASSWORD=1234 -``` - ## ๐ŸŒฑ ํ™˜๊ฒฝ ๋ณ€์ˆ˜ Flowise๋Š” ์ธ์Šคํ„ด์Šค ๊ตฌ์„ฑ์„ ์œ„ํ•œ ๋‹ค์–‘ํ•œ ํ™˜๊ฒฝ ๋ณ€์ˆ˜๋ฅผ ์ง€์›ํ•ฉ๋‹ˆ๋‹ค. `packages/server` ํด๋” ๋‚ด `.env` ํŒŒ์ผ์— ๋‹ค์–‘ํ•œ ํ™˜๊ฒฝ ๋ณ€์ˆ˜๋ฅผ ์ง€์ •ํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. [์ž์„ธํžˆ ๋ณด๊ธฐ](https://github.com/FlowiseAI/Flowise/blob/main/CONTRIBUTING.md#-env-variables) @@ -197,9 +182,9 @@ Flowise๋Š” ์ธ์Šคํ„ด์Šค ๊ตฌ์„ฑ์„ ์œ„ํ•œ ๋‹ค์–‘ํ•œ ํ™˜๊ฒฝ ๋ณ€์ˆ˜๋ฅผ ์ง€์›ํ•ฉ๋‹ˆ -[contributing guide](CONTRIBUTING.md)๋ฅผ ์‚ดํŽด๋ณด์„ธ์š”. ๋””์Šค์ฝ”๋“œ [Discord](https://discord.gg/jbaHfsRVBW) ์ฑ„๋„์—์„œ๋„ ์ด์Šˆ๋‚˜ ์งˆ์˜์‘๋‹ต์„ ์ง„ํ–‰ํ•˜์‹ค ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. +[contributing guide](../CONTRIBUTING.md)๋ฅผ ์‚ดํŽด๋ณด์„ธ์š”. 
๋””์Šค์ฝ”๋“œ [Discord](https://discord.gg/jbaHfsRVBW) ์ฑ„๋„์—์„œ๋„ ์ด์Šˆ๋‚˜ ์งˆ์˜์‘๋‹ต์„ ์ง„ํ–‰ํ•˜์‹ค ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. [![Star History Chart](https://api.star-history.com/svg?repos=FlowiseAI/Flowise&type=Timeline)](https://star-history.com/#FlowiseAI/Flowise&Date) ## ๐Ÿ“„ ๋ผ์ด์„ผ์Šค -๋ณธ ๋ฆฌํฌ์ง€ํ† ๋ฆฌ์˜ ์†Œ์Šค์ฝ”๋“œ๋Š” [Apache License Version 2.0](LICENSE.md) ๋ผ์ด์„ผ์Šค๊ฐ€ ์ ์šฉ๋ฉ๋‹ˆ๋‹ค. +๋ณธ ๋ฆฌํฌ์ง€ํ† ๋ฆฌ์˜ ์†Œ์Šค์ฝ”๋“œ๋Š” [Apache License Version 2.0](../LICENSE.md) ๋ผ์ด์„ผ์Šค๊ฐ€ ์ ์šฉ๋ฉ๋‹ˆ๋‹ค. diff --git a/i18n/README-TW.md b/i18n/README-TW.md index f051e844e..c8fbfedbb 100644 --- a/i18n/README-TW.md +++ b/i18n/README-TW.md @@ -13,7 +13,7 @@ [English](../README.md) | ็น้ซ”ไธญๆ–‡ | [็ฎ€ไฝ“ไธญๆ–‡](./README-ZH.md) | [ๆ—ฅๆœฌ่ชž](./README-JA.md) | [ํ•œ๊ตญ์–ด](./README-KR.md) -

ๅฏ่ฆ–ๅŒ–ๅปบๆง‹ AI/LLM ๆต็จ‹

+

ๅฏ่ฆ–ๅŒ–ๅปบ็ฝฎ AI/LLM ๆต็จ‹

@@ -31,28 +31,22 @@ npx flowise start ``` - ไฝฟ็”จ็”จๆˆถๅๅ’Œๅฏ†็ขผ - - ```bash - npx flowise start --FLOWISE_USERNAME=user --FLOWISE_PASSWORD=1234 - ``` - 3. ๆ‰“้–‹ [http://localhost:3000](http://localhost:3000) ## ๐Ÿณ Docker ### Docker Compose -1. ๅ…‹้š† Flowise ้ …็›ฎ -2. ้€ฒๅ…ฅ้ …็›ฎๆ น็›ฎ้Œ„็š„ `docker` ๆ–‡ไปถๅคพ -3. ่ค‡่ฃฝ `.env.example` ๆ–‡ไปถ๏ผŒ็ฒ˜่ฒผๅˆฐ็›ธๅŒไฝ็ฝฎ๏ผŒไธฆ้‡ๅ‘ฝๅ็‚บ `.env` ๆ–‡ไปถ +1. ่ค‡่ฃฝ Flowise ๅฐˆๆกˆ +2. ้€ฒๅ…ฅๅฐˆๆกˆๆ น็›ฎ้Œ„็š„ `docker` ่ณ‡ๆ–™ๅคพ +3. ่ค‡่ฃฝ `.env.example` ๆ–‡ไปถ๏ผŒ่ฒผๅˆฐ็›ธๅŒไฝ็ฝฎ๏ผŒไธฆ้‡ๆ–ฐๅ‘ฝๅ็‚บ `.env` ๆ–‡ไปถ 4. `docker compose up -d` 5. ๆ‰“้–‹ [http://localhost:3000](http://localhost:3000) -6. ๆ‚จๅฏไปฅ้€š้Ž `docker compose stop` ๅœๆญขๅฎนๅ™จ +6. ๆ‚จๅฏไปฅ้€้Ž `docker compose stop` ๅœๆญขๅฎนๅ™จ ### Docker ๆ˜ ๅƒ -1. ๆœฌๅœฐๆง‹ๅปบๆ˜ ๅƒ๏ผš +1. ๆœฌๅœฐๅปบ็ฝฎๆ˜ ๅƒ๏ผš ```bash docker build --no-cache -t flowise . ``` @@ -69,7 +63,7 @@ ## ๐Ÿ‘จโ€๐Ÿ’ป ้–‹็™ผ่€… -Flowise ๅœจๅ–ฎๅ€‹ mono ๅญ˜ๅ„ฒๅบซไธญๆœ‰ 3 ๅ€‹ไธๅŒ็š„ๆจกๅกŠใ€‚ +Flowise ๅœจๅ–ฎๅ€‹ mono ๅ„ฒๅญ˜ๅบซไธญๆœ‰ 3 ๅ€‹ไธๅŒ็š„ๆจก็ต„ใ€‚ - `server`: ๆไพ› API ้‚่ผฏ็š„ Node ๅพŒ็ซฏ - `ui`: React ๅ‰็ซฏ @@ -85,33 +79,33 @@ Flowise ๅœจๅ–ฎๅ€‹ mono ๅญ˜ๅ„ฒๅบซไธญๆœ‰ 3 ๅ€‹ไธๅŒ็š„ๆจกๅกŠใ€‚ ### ่จญ็ฝฎ -1. ๅ…‹้š†ๅญ˜ๅ„ฒๅบซ +1. ่ค‡่ฃฝๅ„ฒๅญ˜ๅบซ ```bash git clone https://github.com/FlowiseAI/Flowise.git ``` -2. ้€ฒๅ…ฅๅญ˜ๅ„ฒๅบซๆ–‡ไปถๅคพ +2. ้€ฒๅ…ฅๅ„ฒๅญ˜ๅบซๆ–‡ไปถๅคพ ```bash cd Flowise ``` -3. ๅฎ‰่ฃๆ‰€ๆœ‰ๆจกๅกŠ็š„ๆ‰€ๆœ‰ไพ่ณด้ …๏ผš +3. ๅฎ‰่ฃๆ‰€ๆœ‰ๆจก็ต„็š„ๆ‰€ๆœ‰ไพ่ณด้ …๏ผš ```bash pnpm install ``` -4. ๆง‹ๅปบๆ‰€ๆœ‰ไปฃ็ขผ๏ผš +4. ๅปบ็ฝฎๆ‰€ๆœ‰็จ‹ๅผ็ขผ๏ผš ```bash pnpm build ```
- ้€€ๅ‡บไปฃ็ขผ 134๏ผˆJavaScript ๅ †ๅ…งๅญ˜ไธ่ถณ๏ผ‰ - ๅฆ‚ๆžœๅœจ้‹่กŒไธŠ่ฟฐ `build` ่…ณๆœฌๆ™‚้‡ๅˆฐๆญค้Œฏ่ชค๏ผŒ่ซ‹ๅ˜—่ฉฆๅขžๅŠ  Node.js ๅ †ๅคงๅฐไธฆ้‡ๆ–ฐ้‹่กŒ่…ณๆœฌ๏ผš + Exit code 134๏ผˆJavaScript heap out of memory๏ผ‰ + ๅฆ‚ๆžœๅœจ้‹่กŒไธŠ่ฟฐ `build` ่…ณๆœฌๆ™‚้‡ๅˆฐๆญค้Œฏ่ชค๏ผŒ่ซ‹ๅ˜—่ฉฆๅขžๅŠ  Node.js ไธญ็š„ Heap ่จ˜ๆ†ถ้ซ”ๅคงๅฐไธฆ้‡ๆ–ฐ้‹่กŒ่…ณๆœฌ๏ผš export NODE_OPTIONS="--max-old-space-size=4096" pnpm build @@ -124,9 +118,9 @@ Flowise ๅœจๅ–ฎๅ€‹ mono ๅญ˜ๅ„ฒๅบซไธญๆœ‰ 3 ๅ€‹ไธๅŒ็š„ๆจกๅกŠใ€‚ pnpm start ``` - ๆ‚จ็พๅœจๅฏไปฅ่จชๅ• [http://localhost:3000](http://localhost:3000) + ๆ‚จ็พๅœจๅฏไปฅ้–‹ๅ•Ÿ [http://localhost:3000](http://localhost:3000) -6. ๅฐๆ–ผ้–‹็™ผๆง‹ๅปบ๏ผš +6. ๅฐๆ–ผ้–‹็™ผๅปบ็ฝฎ๏ผš - ๅœจ `packages/ui` ไธญๅ‰ตๅปบ `.env` ๆ–‡ไปถไธฆๆŒ‡ๅฎš `VITE_PORT`๏ผˆๅƒ่€ƒ `.env.example`๏ผ‰ - ๅœจ `packages/server` ไธญๅ‰ตๅปบ `.env` ๆ–‡ไปถไธฆๆŒ‡ๅฎš `PORT`๏ผˆๅƒ่€ƒ `.env.example`๏ผ‰ @@ -136,28 +130,19 @@ Flowise ๅœจๅ–ฎๅ€‹ mono ๅญ˜ๅ„ฒๅบซไธญๆœ‰ 3 ๅ€‹ไธๅŒ็š„ๆจกๅกŠใ€‚ pnpm dev ``` - ไปปไฝ•ไปฃ็ขผๆ›ดๆ”น้ƒฝๆœƒ่‡ชๅ‹•้‡ๆ–ฐๅŠ ่ผ‰ๆ‡‰็”จ็จ‹ๅบ [http://localhost:8080](http://localhost:8080) + ไปปไฝ•็จ‹ๅผ็ขผๆ›ดๆ”น้ƒฝๆœƒ่‡ชๅ‹•้‡ๆ–ฐๅŠ ่ผ‰ๆ‡‰็”จ็จ‹ๅผ [http://localhost:8080](http://localhost:8080) -## ๐Ÿ”’ ่ช่ญ‰ +## ๐ŸŒฑ ็’ฐๅขƒ่ฎŠๆ•ธ -่ฆๅ•Ÿ็”จๆ‡‰็”จ็ดšๅˆฅ็š„่บซไปฝ้ฉ—่ญ‰๏ผŒ่ซ‹ๅœจ `packages/server` ไธญ็š„ `.env` ๆ–‡ไปถไธญๆทปๅŠ  `FLOWISE_USERNAME` ๅ’Œ `FLOWISE_PASSWORD`๏ผš - -``` -FLOWISE_USERNAME=user -FLOWISE_PASSWORD=1234 -``` - -## ๐ŸŒฑ ็’ฐๅขƒ่ฎŠ้‡ - -Flowise ๆ”ฏๆŒไธๅŒ็š„็’ฐๅขƒ่ฎŠ้‡ไพ†้…็ฝฎๆ‚จ็š„ๅฏฆไพ‹ใ€‚ๆ‚จๅฏไปฅๅœจ `packages/server` ๆ–‡ไปถๅคพไธญ็š„ `.env` ๆ–‡ไปถไธญๆŒ‡ๅฎšไปฅไธ‹่ฎŠ้‡ใ€‚้–ฑ่ฎ€ [ๆ›ดๅคš](https://github.com/FlowiseAI/Flowise/blob/main/CONTRIBUTING.md#-env-variables) +Flowise ๆ”ฏๆŒไธๅŒ็š„็’ฐๅขƒ่ฎŠๆ•ธไพ†้…็ฝฎๆ‚จ็š„ๅฏฆไพ‹ใ€‚ๆ‚จๅฏไปฅๅœจ `packages/server` ๆ–‡ไปถๅคพไธญ็š„ `.env` ๆ–‡ไปถไธญๆŒ‡ๅฎšไปฅไธ‹่ฎŠๆ•ธใ€‚้–ฑ่ฎ€ [ๆ›ดๅคš](https://github.com/FlowiseAI/Flowise/blob/main/CONTRIBUTING.md#-env-variables) ## ๐Ÿ“– ๆ–‡ๆช” [Flowise ๆ–‡ๆช”](https://docs.flowiseai.com/) 
-## ๐ŸŒ ่‡ชๆˆ‘ๆ‰˜็ฎก +## ๐ŸŒ ่‡ช่กŒๆžถ่จญ -ๅœจๆ‚จ็พๆœ‰็š„ๅŸบ็คŽ่จญๆ–ฝไธญ้ƒจ็ฝฒ Flowise ่‡ชๆˆ‘ๆ‰˜็ฎก๏ผŒๆˆ‘ๅ€‘ๆ”ฏๆŒๅ„็จฎ [้ƒจ็ฝฒ](https://docs.flowiseai.com/configuration/deployment) +ๅœจๆ‚จ็พๆœ‰็š„ๅŸบ็คŽ่จญๆ–ฝไธญ้ƒจ็ฝฒ Flowise๏ผŒๆˆ‘ๅ€‘ๆ”ฏๆŒๅ„็จฎ่‡ช่กŒๆžถ่จญ้ธ้ … [้ƒจ็ฝฒ](https://docs.flowiseai.com/configuration/deployment) - [AWS](https://docs.flowiseai.com/configuration/deployment/aws) - [Azure](https://docs.flowiseai.com/configuration/deployment/azure) @@ -193,9 +178,9 @@ Flowise ๆ”ฏๆŒไธๅŒ็š„็’ฐๅขƒ่ฎŠ้‡ไพ†้…็ฝฎๆ‚จ็š„ๅฏฆไพ‹ใ€‚ๆ‚จๅฏไปฅๅœจ `package
-## โ˜๏ธ Flowise ้›ฒ +## โ˜๏ธ Flowise ้›ฒ็ซฏๅนณๅฐ -[้–‹ๅง‹ไฝฟ็”จ Flowise ้›ฒ](https://flowiseai.com/) +[้–‹ๅง‹ไฝฟ็”จ Flowise ้›ฒ็ซฏๅนณๅฐ](https://flowiseai.com/) ## ๐Ÿ™‹ ๆ”ฏๆŒ @@ -209,9 +194,9 @@ Flowise ๆ”ฏๆŒไธๅŒ็š„็’ฐๅขƒ่ฎŠ้‡ไพ†้…็ฝฎๆ‚จ็š„ๅฏฆไพ‹ใ€‚ๆ‚จๅฏไปฅๅœจ `package -่ซ‹ๅƒ้–ฑ [่ฒข็ปๆŒ‡ๅ—](CONTRIBUTING.md)ใ€‚ๅฆ‚ๆžœๆ‚จๆœ‰ไปปไฝ•ๅ•้กŒๆˆ–ๅ•้กŒ๏ผŒ่ซ‹้€š้Ž [Discord](https://discord.gg/jbaHfsRVBW) ่ˆ‡ๆˆ‘ๅ€‘่ฏ็นซใ€‚ +่ซ‹ๅƒ้–ฑ [่ฒข็ปๆŒ‡ๅ—](../CONTRIBUTING.md)ใ€‚ๅฆ‚ๆžœๆ‚จๆœ‰ไปปไฝ•ๅ•้กŒๆˆ–ๅ•้กŒ๏ผŒ่ซ‹้€้Ž [Discord](https://discord.gg/jbaHfsRVBW) ่ˆ‡ๆˆ‘ๅ€‘่ฏ็นซใ€‚ [![Star History Chart](https://api.star-history.com/svg?repos=FlowiseAI/Flowise&type=Timeline)](https://star-history.com/#FlowiseAI/Flowise&Date) ## ๐Ÿ“„ ่จฑๅฏ่ญ‰ -ๆญคๅญ˜ๅ„ฒๅบซไธญ็š„ๆบไปฃ็ขผๆ นๆ“š [Apache ่จฑๅฏ่ญ‰็‰ˆๆœฌ 2.0](LICENSE.md) ๆไพ›ใ€‚ +ๆญคๅ„ฒๅญ˜ๅบซไธญ็š„ๅŽŸๅง‹็ขผๆ นๆ“š [Apache 2.0 ๆŽˆๆฌŠๆขๆฌพ](../LICENSE.md) ๆŽˆๆฌŠไฝฟ็”จใ€‚ diff --git a/i18n/README-ZH.md b/i18n/README-ZH.md index 5f313fb32..d744d7392 100644 --- a/i18n/README-ZH.md +++ b/i18n/README-ZH.md @@ -31,12 +31,6 @@ npx flowise start ``` - ไฝฟ็”จ็”จๆˆทๅๅ’Œๅฏ†็  - - ```bash - npx flowise start --FLOWISE_USERNAME=user --FLOWISE_PASSWORD=1234 - ``` - 3. 
ๆ‰“ๅผ€ [http://localhost:3000](http://localhost:3000) ## ๐Ÿณ Docker @@ -127,15 +121,6 @@ Flowise ๅœจไธ€ไธชๅ•ไธ€็š„ไปฃ็ ๅบ“ไธญๆœ‰ 3 ไธชไธๅŒ็š„ๆจกๅ—ใ€‚ ไปปไฝ•ไปฃ็ ๆ›ดๆ”น้ƒฝไผš่‡ชๅŠจ้‡ๆ–ฐๅŠ ่ฝฝๅบ”็”จ็จ‹ๅบ๏ผŒ่ฎฟ้—ฎ [http://localhost:8080](http://localhost:8080) -## ๐Ÿ”’ ่ฎค่ฏ - -่ฆๅฏ็”จๅบ”็”จ็จ‹ๅบ็บง่บซไปฝ้ชŒ่ฏ๏ผŒๅœจ `packages/server` ็š„ `.env` ๆ–‡ไปถไธญๆทปๅŠ  `FLOWISE_USERNAME` ๅ’Œ `FLOWISE_PASSWORD`๏ผš - -``` -FLOWISE_USERNAME=user -FLOWISE_PASSWORD=1234 -``` - ## ๐ŸŒฑ ็Žฏๅขƒๅ˜้‡ Flowise ๆ”ฏๆŒไธๅŒ็š„็Žฏๅขƒๅ˜้‡ๆฅ้…็ฝฎๆ‚จ็š„ๅฎžไพ‹ใ€‚ๆ‚จๅฏไปฅๅœจ `packages/server` ๆ–‡ไปถๅคนไธญ็š„ `.env` ๆ–‡ไปถไธญๆŒ‡ๅฎšไปฅไธ‹ๅ˜้‡ใ€‚ไบ†่งฃๆ›ดๅคšไฟกๆฏ๏ผŒ่ฏท้˜…่ฏป[ๆ–‡ๆกฃ](https://github.com/FlowiseAI/Flowise/blob/main/CONTRIBUTING.md#-env-variables) @@ -197,8 +182,8 @@ Flowise ๆ”ฏๆŒไธๅŒ็š„็Žฏๅขƒๅ˜้‡ๆฅ้…็ฝฎๆ‚จ็š„ๅฎžไพ‹ใ€‚ๆ‚จๅฏไปฅๅœจ `package -ๅ‚่ง[่ดก็ŒฎๆŒ‡ๅ—](CONTRIBUTING.md)ใ€‚ๅฆ‚ๆžœๆ‚จๆœ‰ไปปไฝ•้—ฎ้ข˜ๆˆ–้—ฎ้ข˜๏ผŒ่ฏทๅœจ[Discord](https://discord.gg/jbaHfsRVBW)ไธŠไธŽๆˆ‘ไปฌ่”็ณปใ€‚ +ๅ‚่ง[่ดก็ŒฎๆŒ‡ๅ—](CONTRIBUTING-ZH.md)ใ€‚ๅฆ‚ๆžœๆ‚จๆœ‰ไปปไฝ•้—ฎ้ข˜ๆˆ–้—ฎ้ข˜๏ผŒ่ฏทๅœจ[Discord](https://discord.gg/jbaHfsRVBW)ไธŠไธŽๆˆ‘ไปฌ่”็ณปใ€‚ ## ๐Ÿ“„ ่ฎธๅฏ่ฏ -ๆญคไปฃ็ ๅบ“ไธญ็š„ๆบไปฃ็ ๅœจ[Apache License Version 2.0 ่ฎธๅฏ่ฏ](LICENSE.md)ไธ‹ๆไพ›ใ€‚ +ๆญคไปฃ็ ๅบ“ไธญ็š„ๆบไปฃ็ ๅœจ[Apache License Version 2.0 ่ฎธๅฏ่ฏ](../LICENSE.md)ไธ‹ๆไพ›ใ€‚ diff --git a/metrics/otel/compose.yaml b/metrics/otel/compose.yaml index 4567588ff..974081979 100644 --- a/metrics/otel/compose.yaml +++ b/metrics/otel/compose.yaml @@ -1,15 +1,17 @@ -version: "2" +version: '2' services: - otel-collector: - image: otel/opentelemetry-collector-contrib - command: ["--config=/etc/otelcol-contrib/config.yaml", "--feature-gates=-exporter.datadogexporter.DisableAPMStats", "${OTELCOL_ARGS}"] - volumes: - - ./otel.config.yml:/etc/otelcol-contrib/config.yaml - ports: - - 1888:1888 # pprof extension - - 8888:8888 # Prometheus metrics exposed by the Collector - - 8889:8889 # Prometheus exporter metrics - 
- 13133:13133 # health_check extension - - 4317:4317 # OTLP gRPC receiver - - 4318:4318 # OTLP http receiver - - 55679:55679 # zpages extension + otel-collector: + read_only: true + image: otel/opentelemetry-collector-contrib + command: + ['--config=/etc/otelcol-contrib/config.yaml', '--feature-gates=-exporter.datadogexporter.DisableAPMStats', '${OTELCOL_ARGS}'] + volumes: + - ./otel.config.yml:/etc/otelcol-contrib/config.yaml + ports: + - 1888:1888 # pprof extension + - 8888:8888 # Prometheus metrics exposed by the Collector + - 8889:8889 # Prometheus exporter metrics + - 13133:13133 # health_check extension + - 4317:4317 # OTLP gRPC receiver + - 4318:4318 # OTLP http receiver + - 55679:55679 # zpages extension diff --git a/package.json b/package.json index f7855fef5..9ee93d127 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "flowise", - "version": "3.0.0", + "version": "3.0.11", "private": true, "homepage": "https://flowiseai.com", "workspaces": [ @@ -20,6 +20,10 @@ "start-worker": "run-script-os", "start-worker:windows": "cd packages/server/bin && run worker", "start-worker:default": "cd packages/server/bin && ./run worker", + "user": "run-script-os", + "user:windows": "cd packages/server/bin && run user", + "user:default": "cd packages/server/bin && ./run user", + "test": "turbo run test", "clean": "pnpm --filter \"./packages/**\" clean", "nuke": "pnpm --filter \"./packages/**\" nuke && rimraf node_modules .turbo", "format": "prettier --write \"**/*.{ts,tsx,md}\"", @@ -62,20 +66,26 @@ "sqlite3" ], "overrides": { - "axios": "1.7.9", + "axios": "1.12.0", "body-parser": "2.0.2", "braces": "3.0.3", "cross-spawn": "7.0.6", + "form-data": "4.0.4", "glob-parent": "6.0.2", "http-proxy-middleware": "3.0.3", "json5": "2.2.3", "nth-check": "2.1.1", "path-to-regexp": "0.1.12", "prismjs": "1.29.0", + "rollup": "4.45.0", "semver": "7.7.1", "set-value": "4.1.0", + "solid-js": "1.9.7", + "tar-fs": "3.1.0", "unset-value": "2.0.1", - 
"webpack-dev-middleware": "7.4.2" + "webpack-dev-middleware": "7.4.2", + "ws": "8.18.3", + "xlsx": "https://cdn.sheetjs.com/xlsx-0.20.3/xlsx-0.20.3.tgz" } }, "engines": { @@ -85,7 +95,7 @@ "resolutions": { "@google/generative-ai": "^0.24.0", "@grpc/grpc-js": "^1.10.10", - "@langchain/core": "0.3.37", + "@langchain/core": "0.3.61", "@qdrant/openapi-typescript-fetch": "1.2.6", "openai": "4.96.0", "protobufjs": "7.4.0" diff --git a/packages/api-documentation/package.json b/packages/api-documentation/package.json index 780920f7c..849a10a29 100644 --- a/packages/api-documentation/package.json +++ b/packages/api-documentation/package.json @@ -1,6 +1,6 @@ { "name": "flowise-api", - "version": "1.0.2", + "version": "1.0.3", "description": "Flowise API documentation server", "scripts": { "build": "tsc", diff --git a/packages/api-documentation/src/yml/swagger.yml b/packages/api-documentation/src/yml/swagger.yml index 21b8f1dd0..00cf975a1 100644 --- a/packages/api-documentation/src/yml/swagger.yml +++ b/packages/api-documentation/src/yml/swagger.yml @@ -1216,15 +1216,18 @@ paths: security: - bearerAuth: [] operationId: createPrediction - summary: Create a new prediction - description: Create a new prediction + summary: Send message to flow and get AI response + description: | + Send a message to your flow and receive an AI-generated response. This is the primary endpoint for interacting with your flows and assistants. + **Authentication**: API key may be required depending on flow settings. 
parameters: - in: path name: id required: true schema: type: string - description: Chatflow ID + description: Flow ID - the unique identifier of your flow + example: 'your-flow-id' requestBody: content: application/json: @@ -1236,24 +1239,36 @@ paths: properties: question: type: string - description: Question to ask during the prediction process + description: Question/message to send to the flow + example: 'Analyze this uploaded file and summarize its contents' files: type: array items: type: string format: binary - description: Files to be uploaded - modelName: + description: Files to be uploaded (images, audio, documents, etc.) + streaming: + type: boolean + description: Enable streaming responses + default: false + overrideConfig: type: string - nullable: true - example: '' - description: Other override configurations + description: JSON string of configuration overrides + example: '{"sessionId":"user-123","temperature":0.7}' + history: + type: string + description: JSON string of conversation history + example: '[{"role":"userMessage","content":"Hello"},{"role":"apiMessage","content":"Hi there!"}]' + humanInput: + type: string + description: JSON string of human input for resuming execution + example: '{"type":"proceed","feedback":"Continue with the plan"}' required: - question required: true responses: '200': - description: Prediction created successfully + description: Successful prediction response content: application/json: schema: @@ -1261,45 +1276,106 @@ paths: properties: text: type: string - description: The result of the prediction + description: The AI-generated response text + example: 'Artificial intelligence (AI) is a branch of computer science that focuses on creating systems capable of performing tasks that typically require human intelligence.' 
json: type: object - description: The result of the prediction in JSON format if available + description: The result in JSON format if available (for structured outputs) + nullable: true question: type: string - description: The question asked during the prediction process + description: The original question/message sent to the flow + example: 'What is artificial intelligence?' chatId: type: string - description: The chat ID associated with the prediction + description: Unique identifier for the chat session + example: 'chat-12345' chatMessageId: type: string - description: The chat message ID associated with the prediction + description: Unique identifier for this specific message + example: 'msg-67890' sessionId: type: string - description: The session ID associated with the prediction + description: Session identifier for conversation continuity + example: 'user-session-123' + nullable: true memoryType: type: string - description: The memory type associated with the prediction + description: Type of memory used for conversation context + example: 'Buffer Memory' + nullable: true sourceDocuments: type: array + description: Documents retrieved from vector store (if RAG is enabled) items: $ref: '#/components/schemas/Document' + nullable: true usedTools: type: array + description: Tools that were invoked during the response generation items: $ref: '#/components/schemas/UsedTool' - fileAnnotations: - type: array - items: - $ref: '#/components/schemas/FileAnnotation' + nullable: true '400': - description: Invalid input provided + description: Bad Request - Invalid input provided or request format is incorrect + content: + application/json: + schema: + type: object + properties: + error: + type: string + example: 'Invalid request format. Check required fields and parameter types.' 
+ '401': + description: Unauthorized - API key required or invalid + content: + application/json: + schema: + type: object + properties: + error: + type: string + example: 'Unauthorized access. Please verify your API key.' '404': - description: Chatflow not found + description: Not Found - Chatflow with specified ID does not exist + content: + application/json: + schema: + type: object + properties: + error: + type: string + example: 'Chatflow not found. Please verify the chatflow ID.' + '413': + description: Payload Too Large - Request payload exceeds size limits + content: + application/json: + schema: + type: object + properties: + error: + type: string + example: 'Request payload too large. Please reduce file sizes or split large requests.' '422': - description: Validation error + description: Validation Error - Request validation failed + content: + application/json: + schema: + type: object + properties: + error: + type: string + example: 'Validation failed. Check parameter requirements and data types.' '500': - description: Internal server error + description: Internal Server Error - Flow configuration or execution error + content: + application/json: + schema: + type: object + properties: + error: + type: string + example: 'Internal server error. Check flow configuration and node settings.' /tools: post: tags: @@ -2011,13 +2087,33 @@ components: properties: question: type: string - description: The question being asked + description: The question/message to send to the flow + example: 'What is artificial intelligence?' 
+ form: + type: object + description: The form object to send to the flow (alternative to question for Agentflow V2) + additionalProperties: true + example: + title: 'Example' + count: 1 + streaming: + type: boolean + description: Enable streaming responses for real-time output + default: false + example: false overrideConfig: type: object - description: The configuration to override the default prediction settings (optional) + description: Override flow configuration and pass variables at runtime + additionalProperties: true + example: + sessionId: 'user-session-123' + temperature: 0.7 + maxTokens: 500 + vars: + user_name: 'Alice' history: type: array - description: The history messages to be prepended (optional) + description: Previous conversation messages for context items: type: object properties: @@ -2030,8 +2126,14 @@ components: type: string description: The content of the message example: 'Hello, how can I help you?' + example: + - role: 'apiMessage' + content: "Hello! I'm an AI assistant. How can I help you today?" + - role: 'userMessage' + content: "Hi, my name is Sarah and I'm learning about AI" uploads: type: array + description: Files to upload (images, audio, documents, etc.) 
items: type: object properties: @@ -2051,7 +2153,42 @@ components: mime: type: string description: The MIME type of the file or resource + enum: + [ + 'image/png', + 'image/jpeg', + 'image/jpg', + 'image/gif', + 'image/webp', + 'audio/mp4', + 'audio/webm', + 'audio/wav', + 'audio/mpeg', + 'audio/ogg', + 'audio/aac' + ] example: 'image/png' + example: + - type: 'file' + name: 'example.png' + data: 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABgAAAAYCAYAAADgdz34AAABjElEQVRIS+2Vv0oDQRDG' + mime: 'image/png' + humanInput: + type: object + description: Return human feedback and resume execution from a stopped checkpoint + properties: + type: + type: string + enum: [proceed, reject] + description: Type of human input response + example: 'reject' + feedback: + type: string + description: Feedback to the last output + example: 'Include more emoji' + example: + type: 'reject' + feedback: 'Include more emoji' Tool: type: object diff --git a/packages/components/credentials/AgentflowApi.credential.ts b/packages/components/credentials/AgentflowApi.credential.ts new file mode 100644 index 000000000..72f4cefe5 --- /dev/null +++ b/packages/components/credentials/AgentflowApi.credential.ts @@ -0,0 +1,23 @@ +import { INodeParams, INodeCredential } from '../src/Interface' + +class AgentflowApi implements INodeCredential { + label: string + name: string + version: number + inputs: INodeParams[] + + constructor() { + this.label = 'Agentflow API' + this.name = 'agentflowApi' + this.version = 1.0 + this.inputs = [ + { + label: 'Agentflow Api Key', + name: 'agentflowApiKey', + type: 'password' + } + ] + } +} + +module.exports = { credClass: AgentflowApi } diff --git a/packages/components/credentials/CometApi.credential.ts b/packages/components/credentials/CometApi.credential.ts new file mode 100644 index 000000000..58ec60610 --- /dev/null +++ b/packages/components/credentials/CometApi.credential.ts @@ -0,0 +1,23 @@ +import { INodeCredential, INodeParams } from '../src/Interface' + 
+class CometApi implements INodeCredential { + label: string + name: string + version: number + inputs: INodeParams[] + + constructor() { + this.label = 'Comet API' + this.name = 'cometApi' + this.version = 1.0 + this.inputs = [ + { + label: 'Comet API Key', + name: 'cometApiKey', + type: 'password' + } + ] + } +} + +module.exports = { credClass: CometApi } diff --git a/packages/components/credentials/ElevenLabsApi.credential.ts b/packages/components/credentials/ElevenLabsApi.credential.ts new file mode 100644 index 000000000..89a8b275d --- /dev/null +++ b/packages/components/credentials/ElevenLabsApi.credential.ts @@ -0,0 +1,26 @@ +import { INodeParams, INodeCredential } from '../src/Interface' + +class ElevenLabsApi implements INodeCredential { + label: string + name: string + version: number + description: string + inputs: INodeParams[] + + constructor() { + this.label = 'Eleven Labs API' + this.name = 'elevenLabsApi' + this.version = 1.0 + this.description = + 'Sign up for a Eleven Labs account and create an API Key.' 
+ this.inputs = [ + { + label: 'Eleven Labs API Key', + name: 'elevenLabsApiKey', + type: 'password' + } + ] + } +} + +module.exports = { credClass: ElevenLabsApi } diff --git a/packages/components/credentials/GmailOAuth2.credential.ts b/packages/components/credentials/GmailOAuth2.credential.ts new file mode 100644 index 000000000..38d23a154 --- /dev/null +++ b/packages/components/credentials/GmailOAuth2.credential.ts @@ -0,0 +1,63 @@ +import { INodeParams, INodeCredential } from '../src/Interface' +const scopes = [ + 'https://www.googleapis.com/auth/gmail.readonly', + 'https://www.googleapis.com/auth/gmail.compose', + 'https://www.googleapis.com/auth/gmail.modify', + 'https://www.googleapis.com/auth/gmail.labels' +] + +class GmailOAuth2 implements INodeCredential { + label: string + name: string + version: number + inputs: INodeParams[] + description: string + + constructor() { + this.label = 'Gmail OAuth2' + this.name = 'gmailOAuth2' + this.version = 1.0 + this.description = + 'You can find the setup instructions here' + this.inputs = [ + { + label: 'Authorization URL', + name: 'authorizationUrl', + type: 'string', + default: 'https://accounts.google.com/o/oauth2/v2/auth' + }, + { + label: 'Access Token URL', + name: 'accessTokenUrl', + type: 'string', + default: 'https://oauth2.googleapis.com/token' + }, + { + label: 'Client ID', + name: 'clientId', + type: 'string' + }, + { + label: 'Client Secret', + name: 'clientSecret', + type: 'password' + }, + { + label: 'Additional Parameters', + name: 'additionalParameters', + type: 'string', + default: 'access_type=offline&prompt=consent', + hidden: true + }, + { + label: 'Scope', + name: 'scope', + type: 'string', + hidden: true, + default: scopes.join(' ') + } + ] + } +} + +module.exports = { credClass: GmailOAuth2 } diff --git a/packages/components/credentials/GoogleCalendarOAuth2.credential.ts b/packages/components/credentials/GoogleCalendarOAuth2.credential.ts new file mode 100644 index 000000000..5792067a3 --- 
/dev/null +++ b/packages/components/credentials/GoogleCalendarOAuth2.credential.ts @@ -0,0 +1,58 @@ +import { INodeParams, INodeCredential } from '../src/Interface' +const scopes = ['https://www.googleapis.com/auth/calendar', 'https://www.googleapis.com/auth/calendar.events'] + +class GoogleCalendarOAuth2 implements INodeCredential { + label: string + name: string + version: number + inputs: INodeParams[] + description: string + + constructor() { + this.label = 'Google Calendar OAuth2' + this.name = 'googleCalendarOAuth2' + this.version = 1.0 + this.description = + 'You can find the setup instructions here' + this.inputs = [ + { + label: 'Authorization URL', + name: 'authorizationUrl', + type: 'string', + default: 'https://accounts.google.com/o/oauth2/v2/auth' + }, + { + label: 'Access Token URL', + name: 'accessTokenUrl', + type: 'string', + default: 'https://oauth2.googleapis.com/token' + }, + { + label: 'Client ID', + name: 'clientId', + type: 'string' + }, + { + label: 'Client Secret', + name: 'clientSecret', + type: 'password' + }, + { + label: 'Additional Parameters', + name: 'additionalParameters', + type: 'string', + default: 'access_type=offline&prompt=consent', + hidden: true + }, + { + label: 'Scope', + name: 'scope', + type: 'string', + hidden: true, + default: scopes.join(' ') + } + ] + } +} + +module.exports = { credClass: GoogleCalendarOAuth2 } diff --git a/packages/components/credentials/GoogleDocsOAuth2.credential.ts b/packages/components/credentials/GoogleDocsOAuth2.credential.ts new file mode 100644 index 000000000..24cb5d6d5 --- /dev/null +++ b/packages/components/credentials/GoogleDocsOAuth2.credential.ts @@ -0,0 +1,62 @@ +import { INodeParams, INodeCredential } from '../src/Interface' +const scopes = [ + 'https://www.googleapis.com/auth/documents', + 'https://www.googleapis.com/auth/drive', + 'https://www.googleapis.com/auth/drive.file' +] + +class GoogleDocsOAuth2 implements INodeCredential { + label: string + name: string + version: number + 
inputs: INodeParams[] + description: string + + constructor() { + this.label = 'Google Docs OAuth2' + this.name = 'googleDocsOAuth2' + this.version = 1.0 + this.description = + 'You can find the setup instructions here' + this.inputs = [ + { + label: 'Authorization URL', + name: 'authorizationUrl', + type: 'string', + default: 'https://accounts.google.com/o/oauth2/v2/auth' + }, + { + label: 'Access Token URL', + name: 'accessTokenUrl', + type: 'string', + default: 'https://oauth2.googleapis.com/token' + }, + { + label: 'Client ID', + name: 'clientId', + type: 'string' + }, + { + label: 'Client Secret', + name: 'clientSecret', + type: 'password' + }, + { + label: 'Additional Parameters', + name: 'additionalParameters', + type: 'string', + default: 'access_type=offline&prompt=consent', + hidden: true + }, + { + label: 'Scope', + name: 'scope', + type: 'string', + hidden: true, + default: scopes.join(' ') + } + ] + } +} + +module.exports = { credClass: GoogleDocsOAuth2 } diff --git a/packages/components/credentials/GoogleDriveOAuth2.credential.ts b/packages/components/credentials/GoogleDriveOAuth2.credential.ts new file mode 100644 index 000000000..de027a8e4 --- /dev/null +++ b/packages/components/credentials/GoogleDriveOAuth2.credential.ts @@ -0,0 +1,62 @@ +import { INodeParams, INodeCredential } from '../src/Interface' +const scopes = [ + 'https://www.googleapis.com/auth/drive', + 'https://www.googleapis.com/auth/drive.appdata', + 'https://www.googleapis.com/auth/drive.photos.readonly' +] + +class GoogleDriveOAuth2 implements INodeCredential { + label: string + name: string + version: number + inputs: INodeParams[] + description: string + + constructor() { + this.label = 'Google Drive OAuth2' + this.name = 'googleDriveOAuth2' + this.version = 1.0 + this.description = + 'You can find the setup instructions here' + this.inputs = [ + { + label: 'Authorization URL', + name: 'authorizationUrl', + type: 'string', + default: 'https://accounts.google.com/o/oauth2/v2/auth' + 
}, + { + label: 'Access Token URL', + name: 'accessTokenUrl', + type: 'string', + default: 'https://oauth2.googleapis.com/token' + }, + { + label: 'Client ID', + name: 'clientId', + type: 'string' + }, + { + label: 'Client Secret', + name: 'clientSecret', + type: 'password' + }, + { + label: 'Additional Parameters', + name: 'additionalParameters', + type: 'string', + default: 'access_type=offline&prompt=consent', + hidden: true + }, + { + label: 'Scope', + name: 'scope', + type: 'string', + hidden: true, + default: scopes.join(' ') + } + ] + } +} + +module.exports = { credClass: GoogleDriveOAuth2 } diff --git a/packages/components/credentials/GoogleSheetsOAuth2.credential.ts b/packages/components/credentials/GoogleSheetsOAuth2.credential.ts new file mode 100644 index 000000000..3e2147922 --- /dev/null +++ b/packages/components/credentials/GoogleSheetsOAuth2.credential.ts @@ -0,0 +1,62 @@ +import { INodeParams, INodeCredential } from '../src/Interface' +const scopes = [ + 'https://www.googleapis.com/auth/drive.file', + 'https://www.googleapis.com/auth/spreadsheets', + 'https://www.googleapis.com/auth/drive.metadata' +] + +class GoogleSheetsOAuth2 implements INodeCredential { + label: string + name: string + version: number + inputs: INodeParams[] + description: string + + constructor() { + this.label = 'Google Sheets OAuth2' + this.name = 'googleSheetsOAuth2' + this.version = 1.0 + this.description = + 'You can find the setup instructions here' + this.inputs = [ + { + label: 'Authorization URL', + name: 'authorizationUrl', + type: 'string', + default: 'https://accounts.google.com/o/oauth2/v2/auth' + }, + { + label: 'Access Token URL', + name: 'accessTokenUrl', + type: 'string', + default: 'https://oauth2.googleapis.com/token' + }, + { + label: 'Client ID', + name: 'clientId', + type: 'string' + }, + { + label: 'Client Secret', + name: 'clientSecret', + type: 'password' + }, + { + label: 'Additional Parameters', + name: 'additionalParameters', + type: 'string', + 
default: 'access_type=offline&prompt=consent', + hidden: true + }, + { + label: 'Scope', + name: 'scope', + type: 'string', + hidden: true, + default: scopes.join(' ') + } + ] + } +} + +module.exports = { credClass: GoogleSheetsOAuth2 } diff --git a/packages/components/credentials/MicrosoftOutlookOAuth2.credential.ts b/packages/components/credentials/MicrosoftOutlookOAuth2.credential.ts new file mode 100644 index 000000000..0308969a4 --- /dev/null +++ b/packages/components/credentials/MicrosoftOutlookOAuth2.credential.ts @@ -0,0 +1,66 @@ +import { INodeParams, INodeCredential } from '../src/Interface' + +const scopes = [ + 'openid', + 'offline_access', + 'Contacts.Read', + 'Contacts.ReadWrite', + 'Calendars.Read', + 'Calendars.Read.Shared', + 'Calendars.ReadWrite', + 'Mail.Read', + 'Mail.ReadWrite', + 'Mail.ReadWrite.Shared', + 'Mail.Send', + 'Mail.Send.Shared', + 'MailboxSettings.Read' +] + +class MsoftOutlookOAuth2 implements INodeCredential { + label: string + name: string + version: number + description: string + inputs: INodeParams[] + + constructor() { + this.label = 'Microsoft Outlook OAuth2' + this.name = 'microsoftOutlookOAuth2' + this.version = 1.0 + this.description = + 'You can find the setup instructions here' + this.inputs = [ + { + label: 'Authorization URL', + name: 'authorizationUrl', + type: 'string', + default: 'https://login.microsoftonline.com//oauth2/v2.0/authorize' + }, + { + label: 'Access Token URL', + name: 'accessTokenUrl', + type: 'string', + default: 'https://login.microsoftonline.com//oauth2/v2.0/token' + }, + { + label: 'Client ID', + name: 'clientId', + type: 'string' + }, + { + label: 'Client Secret', + name: 'clientSecret', + type: 'password' + }, + { + label: 'Scope', + name: 'scope', + type: 'string', + hidden: true, + default: scopes.join(' ') + } + ] + } +} + +module.exports = { credClass: MsoftOutlookOAuth2 } diff --git a/packages/components/credentials/MicrosoftTeamsOAuth2.credential.ts 
b/packages/components/credentials/MicrosoftTeamsOAuth2.credential.ts new file mode 100644 index 000000000..ffda846ae --- /dev/null +++ b/packages/components/credentials/MicrosoftTeamsOAuth2.credential.ts @@ -0,0 +1,87 @@ +import { INodeParams, INodeCredential } from '../src/Interface' + +// Comprehensive scopes for Microsoft Teams operations +const scopes = [ + // Basic authentication + 'openid', + 'offline_access', + + // User permissions + 'User.Read', + 'User.ReadWrite.All', + + // Teams and Groups + 'Group.ReadWrite.All', + 'Team.ReadBasic.All', + 'Team.Create', + 'TeamMember.ReadWrite.All', + + // Channels + 'Channel.ReadBasic.All', + 'Channel.Create', + 'Channel.Delete.All', + 'ChannelMember.ReadWrite.All', + + // Chat operations + 'Chat.ReadWrite', + 'Chat.Create', + 'ChatMember.ReadWrite', + + // Messages + 'ChatMessage.Send', + 'ChatMessage.Read', + 'ChannelMessage.Send', + 'ChannelMessage.Read.All', + + // Reactions and advanced features + 'TeamsActivity.Send' +] + +class MsoftTeamsOAuth2 implements INodeCredential { + label: string + name: string + version: number + inputs: INodeParams[] + description: string + + constructor() { + this.label = 'Microsoft Teams OAuth2' + this.name = 'microsoftTeamsOAuth2' + this.version = 1.0 + this.description = + 'You can find the setup instructions here' + this.inputs = [ + { + label: 'Authorization URL', + name: 'authorizationUrl', + type: 'string', + default: 'https://login.microsoftonline.com//oauth2/v2.0/authorize' + }, + { + label: 'Access Token URL', + name: 'accessTokenUrl', + type: 'string', + default: 'https://login.microsoftonline.com//oauth2/v2.0/token' + }, + { + label: 'Client ID', + name: 'clientId', + type: 'string' + }, + { + label: 'Client Secret', + name: 'clientSecret', + type: 'password' + }, + { + label: 'Scope', + name: 'scope', + type: 'string', + hidden: true, + default: scopes.join(' ') + } + ] + } +} + +module.exports = { credClass: MsoftTeamsOAuth2 } diff --git 
a/packages/components/credentials/OxylabsApi.credential.ts b/packages/components/credentials/OxylabsApi.credential.ts new file mode 100644 index 000000000..4ecce3c8e --- /dev/null +++ b/packages/components/credentials/OxylabsApi.credential.ts @@ -0,0 +1,30 @@ +import { INodeParams, INodeCredential } from '../src/Interface' + +class OxylabsApiCredential implements INodeCredential { + label: string + name: string + version: number + description: string + inputs: INodeParams[] + + constructor() { + this.label = 'Oxylabs API' + this.name = 'oxylabsApi' + this.version = 1.0 + this.description = 'Oxylabs API credentials description, to add more info' + this.inputs = [ + { + label: 'Oxylabs Username', + name: 'username', + type: 'string' + }, + { + label: 'Oxylabs Password', + name: 'password', + type: 'password' + } + ] + } +} + +module.exports = { credClass: OxylabsApiCredential } diff --git a/packages/components/credentials/SambanovaApi.credential.ts b/packages/components/credentials/SambanovaApi.credential.ts new file mode 100644 index 000000000..60a7e13d8 --- /dev/null +++ b/packages/components/credentials/SambanovaApi.credential.ts @@ -0,0 +1,23 @@ +import { INodeParams, INodeCredential } from '../src/Interface' + +class SambanovaApi implements INodeCredential { + label: string + name: string + version: number + inputs: INodeParams[] + + constructor() { + this.label = 'Sambanova API' + this.name = 'sambanovaApi' + this.version = 1.0 + this.inputs = [ + { + label: 'Sambanova Api Key', + name: 'sambanovaApiKey', + type: 'password' + } + ] + } +} + +module.exports = { credClass: SambanovaApi } diff --git a/packages/components/credentials/TeradataBearerToken.credential.ts b/packages/components/credentials/TeradataBearerToken.credential.ts new file mode 100644 index 000000000..d0a863041 --- /dev/null +++ b/packages/components/credentials/TeradataBearerToken.credential.ts @@ -0,0 +1,26 @@ +import { INodeParams, INodeCredential } from '../src/Interface' + +class 
TeradataBearerTokenCredential implements INodeCredential { + label: string + name: string + description: string + version: number + inputs: INodeParams[] + + constructor() { + this.label = 'Teradata Bearer Token' + this.name = 'teradataBearerToken' + this.version = 1.0 + this.description = + 'Refer to official guide on how to get Teradata Bearer Token' + this.inputs = [ + { + label: 'Token', + name: 'token', + type: 'password' + } + ] + } +} + +module.exports = { credClass: TeradataBearerTokenCredential } diff --git a/packages/components/credentials/TeradataTD2.credential.ts b/packages/components/credentials/TeradataTD2.credential.ts new file mode 100644 index 000000000..ae3d8f042 --- /dev/null +++ b/packages/components/credentials/TeradataTD2.credential.ts @@ -0,0 +1,28 @@ +import { INodeParams, INodeCredential } from '../src/Interface' + +class TeradataTD2Credential implements INodeCredential { + label: string + name: string + version: number + inputs: INodeParams[] + + constructor() { + this.label = 'Teradata TD2 Auth' + this.name = 'teradataTD2Auth' + this.version = 1.0 + this.inputs = [ + { + label: 'Teradata TD2 Auth Username', + name: 'tdUsername', + type: 'string' + }, + { + label: 'Teradata TD2 Auth Password', + name: 'tdPassword', + type: 'password' + } + ] + } +} + +module.exports = { credClass: TeradataTD2Credential } diff --git a/packages/components/credentials/TeradataVectorStoreApi.credential.ts b/packages/components/credentials/TeradataVectorStoreApi.credential.ts new file mode 100644 index 000000000..9f613cf6c --- /dev/null +++ b/packages/components/credentials/TeradataVectorStoreApi.credential.ts @@ -0,0 +1,47 @@ +import { INodeParams, INodeCredential } from '../src/Interface' + +class TeradataVectorStoreApiCredentials implements INodeCredential { + label: string + name: string + version: number + inputs: INodeParams[] + + constructor() { + this.label = 'Teradata Vector Store API Credentials' + this.name = 'teradataVectorStoreApiCredentials' + 
this.version = 1.0 + this.inputs = [ + { + label: 'Teradata Host IP', + name: 'tdHostIp', + type: 'string' + }, + { + label: 'Username', + name: 'tdUsername', + type: 'string' + }, + { + label: 'Password', + name: 'tdPassword', + type: 'password' + }, + { + label: 'Vector_Store_Base_URL', + name: 'baseURL', + description: 'Teradata Vector Store Base URL', + placeholder: `Base_URL`, + type: 'string' + }, + { + label: 'JWT Token', + name: 'jwtToken', + type: 'password', + description: 'Bearer token for JWT authentication', + optional: true + } + ] + } +} + +module.exports = { credClass: TeradataVectorStoreApiCredentials } diff --git a/packages/components/evaluation/EvaluationRunTracer.ts b/packages/components/evaluation/EvaluationRunTracer.ts new file mode 100644 index 000000000..ce286eb52 --- /dev/null +++ b/packages/components/evaluation/EvaluationRunTracer.ts @@ -0,0 +1,165 @@ +import { RunCollectorCallbackHandler } from '@langchain/core/tracers/run_collector' +import { Run } from '@langchain/core/tracers/base' +import { EvaluationRunner } from './EvaluationRunner' +import { encoding_for_model, get_encoding } from '@dqbd/tiktoken' + +export class EvaluationRunTracer extends RunCollectorCallbackHandler { + evaluationRunId: string + model: string + + constructor(id: string) { + super() + this.evaluationRunId = id + } + + async persistRun(run: Run): Promise { + return super.persistRun(run) + } + + countPromptTokens = (encoding: any, run: Run): number => { + let promptTokenCount = 0 + if (encoding) { + if (run.inputs?.messages?.length > 0 && run.inputs?.messages[0]?.length > 0) { + run.inputs.messages[0].map((message: any) => { + let content = message.content + ? message.content + : message.SystemMessage?.content + ? message.SystemMessage.content + : message.HumanMessage?.content + ? message.HumanMessage.content + : message.AIMessage?.content + ? message.AIMessage.content + : undefined + promptTokenCount += content ? 
encoding.encode(content).length : 0 + }) + } + if (run.inputs?.prompts?.length > 0) { + const content = run.inputs.prompts[0] + promptTokenCount += content ? encoding.encode(content).length : 0 + } + } + return promptTokenCount + } + + countCompletionTokens = (encoding: any, run: Run): number => { + let completionTokenCount = 0 + if (encoding) { + if (run.outputs?.generations?.length > 0 && run.outputs?.generations[0]?.length > 0) { + run.outputs?.generations[0].map((chunk: any) => { + let content = chunk.text ? chunk.text : chunk.message?.content ? chunk.message?.content : undefined + completionTokenCount += content ? encoding.encode(content).length : 0 + }) + } + } + return completionTokenCount + } + + extractModelName = (run: Run): string => { + return ( + (run?.serialized as any)?.kwargs?.model || + (run?.serialized as any)?.kwargs?.model_name || + (run?.extra as any)?.metadata?.ls_model_name || + (run?.extra as any)?.metadata?.fw_model_name + ) + } + + onLLMEnd?(run: Run): void | Promise { + if (run.name) { + let provider = run.name + if (provider === 'BedrockChat') { + provider = 'awsChatBedrock' + } + EvaluationRunner.addMetrics( + this.evaluationRunId, + JSON.stringify({ + provider: provider + }) + ) + } + + let model = this.extractModelName(run) + if (run.outputs?.llmOutput?.tokenUsage) { + const tokenUsage = run.outputs?.llmOutput?.tokenUsage + if (tokenUsage) { + const metric = { + completionTokens: tokenUsage.completionTokens, + promptTokens: tokenUsage.promptTokens, + model: model, + totalTokens: tokenUsage.totalTokens + } + EvaluationRunner.addMetrics(this.evaluationRunId, JSON.stringify(metric)) + } + } else if ( + run.outputs?.generations?.length > 0 && + run.outputs?.generations[0].length > 0 && + run.outputs?.generations[0][0]?.message?.usage_metadata?.total_tokens + ) { + const usage_metadata = run.outputs?.generations[0][0]?.message?.usage_metadata + if (usage_metadata) { + const metric = { + completionTokens: usage_metadata.output_tokens, + 
promptTokens: usage_metadata.input_tokens, + model: model || this.model, + totalTokens: usage_metadata.total_tokens + } + EvaluationRunner.addMetrics(this.evaluationRunId, JSON.stringify(metric)) + } + } else { + let encoding: any = undefined + let promptInputTokens = 0 + let completionTokenCount = 0 + try { + encoding = encoding_for_model(model as any) + promptInputTokens = this.countPromptTokens(encoding, run) + completionTokenCount = this.countCompletionTokens(encoding, run) + } catch (e) { + try { + // as tiktoken will fail for non openai models, assume that is 'cl100k_base' + encoding = get_encoding('cl100k_base') + promptInputTokens = this.countPromptTokens(encoding, run) + completionTokenCount = this.countCompletionTokens(encoding, run) + } catch (e) { + // stay silent + } + } + const metric = { + completionTokens: completionTokenCount, + promptTokens: promptInputTokens, + model: model, + totalTokens: promptInputTokens + completionTokenCount + } + EvaluationRunner.addMetrics(this.evaluationRunId, JSON.stringify(metric)) + //cleanup + this.model = '' + } + } + + async onRunUpdate(run: Run): Promise { + const json = { + [run.run_type]: elapsed(run) + } + let metric = JSON.stringify(json) + if (metric) { + EvaluationRunner.addMetrics(this.evaluationRunId, metric) + } + + if (run.run_type === 'llm') { + let model = this.extractModelName(run) + if (model) { + EvaluationRunner.addMetrics(this.evaluationRunId, JSON.stringify({ model: model })) + this.model = model + } + // OpenAI non streaming models + const estimatedTokenUsage = run.outputs?.llmOutput?.estimatedTokenUsage + if (estimatedTokenUsage && typeof estimatedTokenUsage === 'object' && Object.keys(estimatedTokenUsage).length > 0) { + EvaluationRunner.addMetrics(this.evaluationRunId, estimatedTokenUsage) + } + } + } +} + +function elapsed(run: Run) { + if (!run.end_time) return '' + const elapsed = run.end_time - run.start_time + return `${elapsed.toFixed(2)}` +} diff --git 
a/packages/components/evaluation/EvaluationRunTracerLlama.ts b/packages/components/evaluation/EvaluationRunTracerLlama.ts new file mode 100644 index 000000000..872b16e35 --- /dev/null +++ b/packages/components/evaluation/EvaluationRunTracerLlama.ts @@ -0,0 +1,186 @@ +import { ChatMessage, LLMEndEvent, LLMStartEvent, LLMStreamEvent, MessageContentTextDetail, RetrievalEndEvent, Settings } from 'llamaindex' +import { EvaluationRunner } from './EvaluationRunner' +import { additionalCallbacks, ICommonObject, INodeData } from '../src' +import { RetrievalStartEvent } from 'llamaindex/dist/type/llm/types' +import { AgentEndEvent, AgentStartEvent } from 'llamaindex/dist/type/agent/types' +import { encoding_for_model } from '@dqbd/tiktoken' +import { MessageContent } from '@langchain/core/messages' + +export class EvaluationRunTracerLlama { + evaluationRunId: string + static cbInit = false + static startTimes = new Map() + static models = new Map() + static tokenCounts = new Map() + + constructor(id: string) { + this.evaluationRunId = id + EvaluationRunTracerLlama.constructCallBacks() + } + + static constructCallBacks = () => { + if (!EvaluationRunTracerLlama.cbInit) { + Settings.callbackManager.on('llm-start', (event: LLMStartEvent) => { + const evalID = (event as any).reason.parent?.caller?.evaluationRunId || (event as any).reason.caller?.evaluationRunId + if (!evalID) return + const model = (event as any).reason?.caller?.model + if (model) { + EvaluationRunTracerLlama.models.set(evalID, model) + try { + const encoding = encoding_for_model(model) + if (encoding) { + const { messages } = event.detail.payload + let tokenCount = messages.reduce((count: number, message: ChatMessage) => { + return count + encoding.encode(extractText(message.content)).length + }, 0) + EvaluationRunTracerLlama.tokenCounts.set(evalID + '_promptTokens', tokenCount) + EvaluationRunTracerLlama.tokenCounts.set(evalID + '_outputTokens', 0) + } + } catch (e) { + // catch the error and continue to work. 
+ } + } + EvaluationRunTracerLlama.startTimes.set(evalID + '_llm', event.timeStamp) + }) + Settings.callbackManager.on('llm-end', (event: LLMEndEvent) => { + this.calculateAndSetMetrics(event, 'llm') + }) + Settings.callbackManager.on('llm-stream', (event: LLMStreamEvent) => { + const evalID = (event as any).reason.parent?.caller?.evaluationRunId || (event as any).reason.caller?.evaluationRunId + if (!evalID) return + const { chunk } = event.detail.payload + const { delta } = chunk + const model = (event as any).reason?.caller?.model + try { + const encoding = encoding_for_model(model) + if (encoding) { + let tokenCount = EvaluationRunTracerLlama.tokenCounts.get(evalID + '_outputTokens') || 0 + tokenCount += encoding.encode(extractText(delta)).length + EvaluationRunTracerLlama.tokenCounts.set(evalID + '_outputTokens', tokenCount) + } + } catch (e) { + // catch the error and continue to work. + } + }) + Settings.callbackManager.on('retrieve-start', (event: RetrievalStartEvent) => { + const evalID = (event as any).reason.parent?.caller?.evaluationRunId || (event as any).reason.caller?.evaluationRunId + if (evalID) { + EvaluationRunTracerLlama.startTimes.set(evalID + '_retriever', event.timeStamp) + } + }) + Settings.callbackManager.on('retrieve-end', (event: RetrievalEndEvent) => { + this.calculateAndSetMetrics(event, 'retriever') + }) + Settings.callbackManager.on('agent-start', (event: AgentStartEvent) => { + const evalID = (event as any).reason.parent?.caller?.evaluationRunId || (event as any).reason.caller?.evaluationRunId + if (evalID) { + EvaluationRunTracerLlama.startTimes.set(evalID + '_agent', event.timeStamp) + } + }) + Settings.callbackManager.on('agent-end', (event: AgentEndEvent) => { + this.calculateAndSetMetrics(event, 'agent') + }) + EvaluationRunTracerLlama.cbInit = true + } + } + + private static calculateAndSetMetrics(event: any, label: string) { + const evalID = event.reason.parent?.caller?.evaluationRunId || event.reason.caller?.evaluationRunId + 
if (!evalID) return + const startTime = EvaluationRunTracerLlama.startTimes.get(evalID + '_' + label) as number + let model = + (event as any).reason?.caller?.model || (event as any).reason?.caller?.llm?.model || EvaluationRunTracerLlama.models.get(evalID) + + if (event.detail.payload?.response?.message && model) { + try { + const encoding = encoding_for_model(model) + if (encoding) { + let tokenCount = EvaluationRunTracerLlama.tokenCounts.get(evalID + '_outputTokens') || 0 + tokenCount += encoding.encode(event.detail.payload.response?.message?.content || '').length + EvaluationRunTracerLlama.tokenCounts.set(evalID + '_outputTokens', tokenCount) + } + } catch (e) { + // catch the error and continue to work. + } + } + + // Anthropic + if (event.detail?.payload?.response?.raw?.usage) { + const usage = event.detail.payload.response.raw.usage + if (usage.output_tokens) { + const metric = { + completionTokens: usage.output_tokens, + promptTokens: usage.input_tokens, + model: model, + totalTokens: usage.input_tokens + usage.output_tokens + } + EvaluationRunner.addMetrics(evalID, JSON.stringify(metric)) + } else if (usage.completion_tokens) { + const metric = { + completionTokens: usage.completion_tokens, + promptTokens: usage.prompt_tokens, + model: model, + totalTokens: usage.total_tokens + } + EvaluationRunner.addMetrics(evalID, JSON.stringify(metric)) + } + } else if (event.detail?.payload?.response?.raw['amazon-bedrock-invocationMetrics']) { + const usage = event.detail?.payload?.response?.raw['amazon-bedrock-invocationMetrics'] + const metric = { + completionTokens: usage.outputTokenCount, + promptTokens: usage.inputTokenCount, + model: event.detail?.payload?.response?.raw.model, + totalTokens: usage.inputTokenCount + usage.outputTokenCount + } + EvaluationRunner.addMetrics(evalID, JSON.stringify(metric)) + } else { + const metric = { + [label]: (event.timeStamp - startTime).toFixed(2), + completionTokens: EvaluationRunTracerLlama.tokenCounts.get(evalID + 
'_outputTokens'), + promptTokens: EvaluationRunTracerLlama.tokenCounts.get(evalID + '_promptTokens'), + model: model || EvaluationRunTracerLlama.models.get(evalID) || '', + totalTokens: + (EvaluationRunTracerLlama.tokenCounts.get(evalID + '_outputTokens') || 0) + + (EvaluationRunTracerLlama.tokenCounts.get(evalID + '_promptTokens') || 0) + } + EvaluationRunner.addMetrics(evalID, JSON.stringify(metric)) + } + + //cleanup + EvaluationRunTracerLlama.startTimes.delete(evalID + '_' + label) + EvaluationRunTracerLlama.startTimes.delete(evalID + '_outputTokens') + EvaluationRunTracerLlama.startTimes.delete(evalID + '_promptTokens') + EvaluationRunTracerLlama.models.delete(evalID) + } + + static async injectEvaluationMetadata(nodeData: INodeData, options: ICommonObject, callerObj: any) { + if (options.evaluationRunId && callerObj) { + // these are needed for evaluation runs + options.llamaIndex = true + await additionalCallbacks(nodeData, options) + Object.defineProperty(callerObj, 'evaluationRunId', { + enumerable: true, + configurable: true, + writable: true, + value: options.evaluationRunId + }) + } + } +} + +// from https://github.com/run-llama/LlamaIndexTS/blob/main/packages/core/src/llm/utils.ts +export function extractText(message: MessageContent): string { + if (typeof message !== 'string' && !Array.isArray(message)) { + console.warn('extractText called with non-MessageContent message, this is likely a bug.') + return `${message}` + } else if (typeof message !== 'string' && Array.isArray(message)) { + // message is of type MessageContentDetail[] - retrieve just the text parts and concatenate them + // so we can pass them to the context generator + return message + .filter((c): c is MessageContentTextDetail => c.type === 'text') + .map((c) => c.text) + .join('\n\n') + } else { + return message + } +} diff --git a/packages/components/evaluation/EvaluationRunner.ts b/packages/components/evaluation/EvaluationRunner.ts new file mode 100644 index 000000000..acde79446 --- 
import axios from 'axios'
import { v4 as uuidv4 } from 'uuid'
import { ICommonObject } from '../src'

import { getModelConfigByModelName, MODEL_TYPE } from '../src/modelLoader'

/**
 * Drives evaluation runs: posts each dataset row to one or more chatflows,
 * collects latency/token/cost metrics (fed in by the tracers via the static
 * metric store), and returns a per-row result structure.
 */
export class EvaluationRunner {
    // Metric store keyed by evaluation run id; values are (mostly JSON string)
    // metric entries appended by the tracers.
    static metrics = new Map()

    /**
     * Look up the cost configuration for a provider+model, trying CHAT models
     * first and falling back to LLM models. Returns undefined when unknown.
     */
    static getCostMetrics = async (selectedProvider: string, selectedModel: string) => {
        let modelConfig = await getModelConfigByModelName(MODEL_TYPE.CHAT, selectedProvider, selectedModel)
        if (modelConfig) {
            if (modelConfig['cost_values']) {
                return modelConfig.cost_values
            }
            return { cost_values: modelConfig }
        } else {
            modelConfig = await getModelConfigByModelName(MODEL_TYPE.LLM, selectedProvider, selectedModel)
            if (modelConfig) {
                if (modelConfig['cost_values']) {
                    return modelConfig.cost_values
                }
                return { cost_values: modelConfig }
            }
        }
        return undefined
    }

    /**
     * Drain the metrics collected for an evaluation run. Before returning,
     * append a cost_values entry derived from the last provider/model pair
     * seen in the collected metrics (best effort; failures are ignored).
     */
    static async getAndDeleteMetrics(id: string) {
        const val = EvaluationRunner.metrics.get(id)
        if (val) {
            try {
                //first lets get the provider and model
                let selectedModel = undefined
                let selectedProvider = undefined
                if (val && val.length > 0) {
                    let modelName = ''
                    let providerName = ''
                    for (let i = 0; i < val.length; i++) {
                        const metric = val[i]
                        // Entries are usually JSON strings, but some tracers
                        // push raw objects (e.g. estimatedTokenUsage).
                        if (typeof metric === 'object') {
                            modelName = metric['model']
                            providerName = metric['provider']
                        } else {
                            modelName = JSON.parse(metric)['model']
                            providerName = JSON.parse(metric)['provider']
                        }

                        if (modelName) {
                            selectedModel = modelName
                        }
                        if (providerName) {
                            selectedProvider = providerName
                        }
                    }
                }
                if (selectedProvider && selectedModel) {
                    const modelConfig = await EvaluationRunner.getCostMetrics(selectedProvider, selectedModel)
                    if (modelConfig) {
                        val.push(JSON.stringify({ cost_values: modelConfig }))
                    }
                }
            } catch (error) {
                //stay silent
            }
        }
        EvaluationRunner.metrics.delete(id)
        return val
    }

    /** Append a metric entry for the given evaluation run id. */
    static addMetrics(id: string, metric: string) {
        if (EvaluationRunner.metrics.has(id)) {
            EvaluationRunner.metrics.get(id)?.push(metric)
        } else {
            EvaluationRunner.metrics.set(id, [metric])
        }
    }

    baseURL = ''

    constructor(baseURL: string) {
        this.baseURL = baseURL
    }

    /** Resolve the API key configured for a chatflow; '' when none is set. */
    getChatflowApiKey(chatflowId: string, apiKeys: { chatflowId: string; apiKey: string }[] = []) {
        return apiKeys.find((item) => item.chatflowId === chatflowId)?.apiKey || ''
    }

    /**
     * Run every dataset row against every selected chatflow and return the
     * aggregate result: { evaluationId, runDate, rows: [{ input,
     * expectedOutput, itemNo, evaluations, status }] }.
     */
    public async runEvaluations(data: ICommonObject) {
        const chatflowIds = JSON.parse(data.chatflowId)
        const returnData: ICommonObject = {}
        returnData.evaluationId = data.evaluationId
        returnData.runDate = new Date()
        returnData.rows = []
        for (let i = 0; i < data.dataset.rows.length; i++) {
            returnData.rows.push({
                input: data.dataset.rows[i].input,
                expectedOutput: data.dataset.rows[i].output,
                itemNo: data.dataset.rows[i].sequenceNo,
                evaluations: [],
                status: 'pending'
            })
        }
        for (let i = 0; i < chatflowIds.length; i++) {
            const chatflowId = chatflowIds[i]
            await this.evaluateChatflow(chatflowId, this.getChatflowApiKey(chatflowId, data.apiKeys), data, returnData)
        }
        return returnData
    }

    /**
     * POST every dataset row to a single chatflow's prediction endpoint,
     * recording latency, metrics, nested agent-flow metrics and the actual
     * output (or the error) onto returnData.rows[i].evaluations.
     */
    async evaluateChatflow(chatflowId: string, apiKey: string, data: any, returnData: any) {
        for (let i = 0; i < data.dataset.rows.length; i++) {
            const item = data.dataset.rows[i]
            // Per-request id doubles as the evaluation run id for the tracers.
            const uuid = uuidv4()

            const headers: any = {
                'X-Request-ID': uuid,
                'X-Flowise-Evaluation': 'true'
            }
            if (apiKey) {
                headers['Authorization'] = `Bearer ${apiKey}`
            }
            let axiosConfig = {
                headers: headers
            }
            let startTime = performance.now()
            const runData: any = {}
            runData.chatflowId = chatflowId
            runData.startTime = startTime
            const postData: any = { question: item.input, evaluationRunId: uuid, evaluation: true }
            if (data.sessionId) {
                postData.overrideConfig = { sessionId: data.sessionId }
            }
            try {
                let response = await axios.post(`${this.baseURL}/api/v1/prediction/${chatflowId}`, postData, axiosConfig)
                let agentFlowMetrics: any[] = []
                if (response?.data?.agentFlowExecutedData) {
                    // Collect per-node token usage and cost for agent flows.
                    for (let i = 0; i < response.data.agentFlowExecutedData.length; i++) {
                        const agentFlowExecutedData = response.data.agentFlowExecutedData[i]
                        const input_tokens = agentFlowExecutedData?.data?.output?.usageMetadata?.input_tokens || 0
                        const output_tokens = agentFlowExecutedData?.data?.output?.usageMetadata?.output_tokens || 0
                        const total_tokens =
                            agentFlowExecutedData?.data?.output?.usageMetadata?.total_tokens || input_tokens + output_tokens
                        const metrics: any = {
                            promptTokens: input_tokens,
                            completionTokens: output_tokens,
                            totalTokens: total_tokens,
                            provider:
                                agentFlowExecutedData.data?.input?.llmModelConfig?.llmModel ||
                                agentFlowExecutedData.data?.input?.agentModelConfig?.agentModel,
                            model:
                                agentFlowExecutedData.data?.input?.llmModelConfig?.modelName ||
                                agentFlowExecutedData.data?.input?.agentModelConfig?.modelName,
                            nodeLabel: agentFlowExecutedData?.nodeLabel,
                            nodeId: agentFlowExecutedData?.nodeId
                        }
                        if (metrics.provider && metrics.model) {
                            const modelConfig = await EvaluationRunner.getCostMetrics(metrics.provider, metrics.model)
                            if (modelConfig) {
                                // NOTE(review): getCostMetrics may return either the raw
                                // cost_values object or { cost_values: ... }; only the
                                // latter shape satisfies this access — confirm upstream.
                                metrics.cost_values = {
                                    input_cost: (modelConfig.cost_values.input_cost || 0) * (input_tokens / 1000),
                                    output_cost: (modelConfig.cost_values.output_cost || 0) * (output_tokens / 1000)
                                }
                                metrics.cost_values.total_cost = metrics.cost_values.input_cost + metrics.cost_values.output_cost
                            }
                        }
                        agentFlowMetrics.push(metrics)
                    }
                }
                const endTime = performance.now()
                const timeTaken = (endTime - startTime).toFixed(2)
                if (response?.data?.metrics) {
                    runData.metrics = response.data.metrics
                    runData.metrics.push({
                        apiLatency: timeTaken
                    })
                } else {
                    runData.metrics = [
                        {
                            apiLatency: timeTaken
                        }
                    ]
                }
                if (agentFlowMetrics.length > 0) {
                    runData.nested_metrics = agentFlowMetrics
                }
                runData.status = 'complete'
                let resultText = ''
                if (response.data.text) resultText = response.data.text
                else if (response.data.json) resultText = '```json\n' + JSON.stringify(response.data.json, null, 2)
                else resultText = JSON.stringify(response.data, null, 2)

                runData.actualOutput = resultText
                runData.latency = timeTaken
                runData.error = ''
            } catch (error: any) {
                runData.status = 'error'
                runData.actualOutput = ''
                runData.error = error?.response?.data?.message
                    ? error.response.data.message
                    : error?.message
                    ? error.message
                    : 'Unknown error'
                try {
                    if (runData.error.indexOf('-') > -1) {
                        // if there is a dash, remove all content before
                        runData.error = 'Error: ' + runData.error.substr(runData.error.indexOf('-') + 1).trim()
                    }
                } catch (error) {
                    //stay silent
                }
                const endTime = performance.now()
                const timeTaken = (endTime - startTime).toFixed(2)
                runData.metrics = [
                    {
                        apiLatency: timeTaken
                    }
                ]
                runData.latency = timeTaken
            }
            runData.uuid = uuid
            returnData.rows[i].evaluations.push(runData)
        }
        return returnData
    }
}

// ---- jest.config.js ----
// Fix: the pasted source had the `<rootDir>` token stripped from `roots` and
// `moduleNameMapper` ('/nodes', '/src/$1'); restored so jest resolves paths
// relative to the package root instead of the filesystem root.
module.exports = {
    preset: 'ts-jest',
    testEnvironment: 'node',
    roots: ['<rootDir>/nodes'],
    transform: {
        '^.+\\.tsx?$': 'ts-jest'
    },
    testRegex: '(/__tests__/.*|(\\.|/)(test|spec))\\.tsx?$',
    moduleFileExtensions: ['ts', 'tsx', 'js', 'jsx', 'json', 'node'],
    verbose: true,
    testPathIgnorePatterns: ['/node_modules/', '/dist/'],
    moduleNameMapper: {
        '^../../../src/(.*)$': '<rootDir>/src/$1'
    }
}
0.000003, + "output_cost": 0.000015 + }, + { + "label": "anthropic.claude-haiku-4-5-20251001-v1:0", + "name": "anthropic.claude-haiku-4-5-20251001-v1:0", + "description": "Claude 4.5 Haiku", + "input_cost": 0.000001, + "output_cost": 0.000005 + }, + { + "label": "openai.gpt-oss-20b-1:0", + "name": "openai.gpt-oss-20b-1:0", + "description": "21B parameters model optimized for lower latency, local, and specialized use cases", + "input_cost": 0.00007, + "output_cost": 0.0003 + }, + { + "label": "openai.gpt-oss-120b-1:0", + "name": "openai.gpt-oss-120b-1:0", + "description": "120B parameters model optimized for production, general purpose, and high-reasoning use cases", + "input_cost": 0.00015, + "output_cost": 0.0006 + }, + { + "label": "anthropic.claude-opus-4-1-20250805-v1:0", + "name": "anthropic.claude-opus-4-1-20250805-v1:0", + "description": "Claude 4.1 Opus", + "input_cost": 0.000015, + "output_cost": 0.000075 + }, { "label": "anthropic.claude-sonnet-4-20250514-v1:0", "name": "anthropic.claude-sonnet-4-20250514-v1:0", @@ -280,6 +315,30 @@ { "name": "azureChatOpenAI", "models": [ + { + "label": "gpt-5.1", + "name": "gpt-5.1", + "input_cost": 0.00000125, + "output_cost": 0.00001 + }, + { + "label": "gpt-5", + "name": "gpt-5", + "input_cost": 0.00000125, + "output_cost": 0.00001 + }, + { + "label": "gpt-5-mini", + "name": "gpt-5-mini", + "input_cost": 0.00000025, + "output_cost": 0.000002 + }, + { + "label": "gpt-5-nano", + "name": "gpt-5-nano", + "input_cost": 0.00000005, + "output_cost": 0.0000004 + }, { "label": "gpt-4.1", "name": "gpt-4.1", @@ -357,6 +416,18 @@ "name": "gpt-4.5-preview", "input_cost": 0.000075, "output_cost": 0.00015 + }, + { + "label": "gpt-4.1-mini", + "name": "gpt-4.1-mini", + "input_cost": 0.0000004, + "output_cost": 0.0000016 + }, + { + "label": "gpt-5-chat-latest", + "name": "gpt-5-chat-latest", + "input_cost": 0.00000125, + "output_cost": 0.00001 } ] }, @@ -416,12 +487,38 @@ "name": "gpt-4-1106-preview", "input_cost": 0.00001, 
"output_cost": 0.00003 + }, + { + "label": "gpt-4.1-mini", + "name": "gpt-4.1-mini", + "input_cost": 0.0000004, + "output_cost": 0.0000016 + }, + { + "label": "gpt-5-chat-latest", + "name": "gpt-5-chat-latest", + "input_cost": 0.00000125, + "output_cost": 0.00001 } ] }, { "name": "chatAnthropic", "models": [ + { + "label": "claude-sonnet-4-5", + "name": "claude-sonnet-4-5", + "description": "Claude 4.5 Sonnet", + "input_cost": 0.000003, + "output_cost": 0.000015 + }, + { + "label": "claude-haiku-4-5", + "name": "claude-haiku-4-5", + "description": "Claude 4.5 Haiku", + "input_cost": 0.000001, + "output_cost": 0.000005 + }, { "label": "claude-sonnet-4-0", "name": "claude-sonnet-4-0", @@ -429,6 +526,13 @@ "input_cost": 0.000003, "output_cost": 0.000015 }, + { + "label": "claude-opus-4-1", + "name": "claude-opus-4-1", + "description": "Claude 4.1 Opus", + "input_cost": 0.000015, + "output_cost": 0.000075 + }, { "label": "claude-opus-4-0", "name": "claude-opus-4-0", @@ -524,17 +628,29 @@ "name": "chatGoogleGenerativeAI", "models": [ { - "label": "gemini-2.5-flash-preview-05-20", - "name": "gemini-2.5-flash-preview-05-20", - "input_cost": 0.15e-6, - "output_cost": 6e-7 + "label": "gemini-3-pro-preview", + "name": "gemini-3-pro-preview", + "input_cost": 0.00002, + "output_cost": 0.00012 }, { - "label": "gemini-2.5-pro-preview-03-25", - "name": "gemini-2.5-pro-preview-03-25", + "label": "gemini-2.5-pro", + "name": "gemini-2.5-pro", + "input_cost": 0.3e-6, + "output_cost": 0.000025 + }, + { + "label": "gemini-2.5-flash", + "name": "gemini-2.5-flash", "input_cost": 1.25e-6, "output_cost": 0.00001 }, + { + "label": "gemini-2.5-flash-lite", + "name": "gemini-2.5-flash-lite", + "input_cost": 1e-7, + "output_cost": 4e-7 + }, { "label": "gemini-2.0-flash", "name": "gemini-2.0-flash", @@ -581,6 +697,42 @@ { "name": "chatGoogleVertexAI", "models": [ + { + "label": "gemini-3-pro-preview", + "name": "gemini-3-pro-preview", + "input_cost": 0.00002, + "output_cost": 0.00012 + }, + { + 
"label": "gemini-2.5-pro", + "name": "gemini-2.5-pro", + "input_cost": 0.3e-6, + "output_cost": 0.000025 + }, + { + "label": "gemini-2.5-flash", + "name": "gemini-2.5-flash", + "input_cost": 1.25e-6, + "output_cost": 0.00001 + }, + { + "label": "gemini-2.5-flash-lite", + "name": "gemini-2.5-flash-lite", + "input_cost": 1e-7, + "output_cost": 4e-7 + }, + { + "label": "gemini-2.0-flash", + "name": "gemini-2.0-flash-001", + "input_cost": 1e-7, + "output_cost": 4e-7 + }, + { + "label": "gemini-2.0-flash-lite", + "name": "gemini-2.0-flash-lite-001", + "input_cost": 7.5e-8, + "output_cost": 3e-7 + }, { "label": "gemini-1.5-flash-002", "name": "gemini-1.5-flash-002", @@ -617,6 +769,27 @@ "input_cost": 1.25e-7, "output_cost": 3.75e-7 }, + { + "label": "claude-sonnet-4-5@20250929", + "name": "claude-sonnet-4-5@20250929", + "description": "Claude 4.5 Sonnet", + "input_cost": 0.000003, + "output_cost": 0.000015 + }, + { + "label": "claude-haiku-4-5@20251001", + "name": "claude-haiku-4-5@20251001", + "description": "Claude 4.5 Haiku", + "input_cost": 0.000001, + "output_cost": 0.000005 + }, + { + "label": "claude-opus-4-1@20250805", + "name": "claude-opus-4-1@20250805", + "description": "Claude 4.1 Opus", + "input_cost": 0.000015, + "output_cost": 0.000075 + }, { "label": "claude-sonnet-4@20250514", "name": "claude-sonnet-4@20250514", @@ -673,11 +846,63 @@ "input_cost": 2.5e-7, "output_cost": 1.25e-6 } + ], + "regions": [ + { "label": "us-east1", "name": "us-east1" }, + { "label": "us-east4", "name": "us-east4" }, + { "label": "us-central1", "name": "us-central1" }, + { "label": "us-west1", "name": "us-west1" }, + { "label": "europe-west4", "name": "europe-west4" }, + { "label": "europe-west1", "name": "europe-west1" }, + { "label": "europe-west3", "name": "europe-west3" }, + { "label": "europe-west2", "name": "europe-west2" }, + { "label": "asia-east1", "name": "asia-east1" }, + { "label": "asia-southeast1", "name": "asia-southeast1" }, + { "label": "asia-northeast1", "name": 
"asia-northeast1" }, + { "label": "asia-south1", "name": "asia-south1" }, + { "label": "australia-southeast1", "name": "australia-southeast1" }, + { "label": "southamerica-east1", "name": "southamerica-east1" }, + { "label": "africa-south1", "name": "africa-south1" }, + { "label": "asia-east2", "name": "asia-east2" }, + { "label": "asia-northeast2", "name": "asia-northeast2" }, + { "label": "asia-northeast3", "name": "asia-northeast3" }, + { "label": "asia-south2", "name": "asia-south2" }, + { "label": "asia-southeast2", "name": "asia-southeast2" }, + { "label": "australia-southeast2", "name": "australia-southeast2" }, + { "label": "europe-central2", "name": "europe-central2" }, + { "label": "europe-north1", "name": "europe-north1" }, + { "label": "europe-north2", "name": "europe-north2" }, + { "label": "europe-southwest1", "name": "europe-southwest1" }, + { "label": "europe-west10", "name": "europe-west10" }, + { "label": "europe-west12", "name": "europe-west12" }, + { "label": "europe-west6", "name": "europe-west6" }, + { "label": "europe-west8", "name": "europe-west8" }, + { "label": "europe-west9", "name": "europe-west9" }, + { "label": "me-central1", "name": "me-central1" }, + { "label": "me-central2", "name": "me-central2" }, + { "label": "me-west1", "name": "me-west1" }, + { "label": "northamerica-northeast1", "name": "northamerica-northeast1" }, + { "label": "northamerica-northeast2", "name": "northamerica-northeast2" }, + { "label": "northamerica-south1", "name": "northamerica-south1" }, + { "label": "southamerica-west1", "name": "southamerica-west1" }, + { "label": "us-east5", "name": "us-east5" }, + { "label": "us-south1", "name": "us-south1" }, + { "label": "us-west2", "name": "us-west2" }, + { "label": "us-west3", "name": "us-west3" }, + { "label": "us-west4", "name": "us-west4" } ] }, { "name": "groqChat", "models": [ + { + "label": "openai/gpt-oss-20b", + "name": "openai/gpt-oss-20b" + }, + { + "label": "openai/gpt-oss-120b", + "name": 
"openai/gpt-oss-120b" + }, { "label": "meta-llama/llama-4-maverick-17b-128e-instruct", "name": "meta-llama/llama-4-maverick-17b-128e-instruct" @@ -789,6 +1014,30 @@ { "name": "chatOpenAI", "models": [ + { + "label": "gpt-5.1", + "name": "gpt-5.1", + "input_cost": 0.00000125, + "output_cost": 0.00001 + }, + { + "label": "gpt-5", + "name": "gpt-5", + "input_cost": 0.00000125, + "output_cost": 0.00001 + }, + { + "label": "gpt-5-mini", + "name": "gpt-5-mini", + "input_cost": 0.00000025, + "output_cost": 0.000002 + }, + { + "label": "gpt-5-nano", + "name": "gpt-5-nano", + "input_cost": 0.00000005, + "output_cost": 0.0000004 + }, { "label": "gpt-4.1", "name": "gpt-4.1", @@ -1217,6 +1466,18 @@ "name": "mistral-large-2402", "input_cost": 0.002, "output_cost": 0.006 + }, + { + "label": "codestral-latsest", + "name": "codestral-latest", + "input_cost": 0.0002, + "output_cost": 0.0006 + }, + { + "label": "devstral-small-2505", + "name": "devstral-small-2505", + "input_cost": 0.0001, + "output_cost": 0.0003 } ] }, @@ -1511,6 +1772,18 @@ "name": "gpt-4-32k", "input_cost": 0.00006, "output_cost": 0.00012 + }, + { + "label": "gpt-4.1-mini", + "name": "gpt-4.1-mini", + "input_cost": 0.0000004, + "output_cost": 0.0000016 + }, + { + "label": "gpt-5-chat-latest", + "name": "gpt-5-chat-latest", + "input_cost": 0.00000125, + "output_cost": 0.00001 } ] }, @@ -1711,29 +1984,65 @@ "name": "googlevertexaiEmbeddings", "models": [ { - "label": "multimodalembedding", - "name": "multimodalembedding" + "label": "gemini-embedding-001", + "name": "gemini-embedding-001" }, { "label": "text-embedding-004", "name": "text-embedding-004" }, + { + "label": "text-embedding-005", + "name": "text-embedding-005" + }, { "label": "text-multilingual-embedding-002", "name": "text-multilingual-embedding-002" - }, - { - "label": "textembedding-gecko@001", - "name": "textembedding-gecko@001" - }, - { - "label": "textembedding-gecko@latest", - "name": "textembedding-gecko@latest" - }, - { - "label": 
"textembedding-gecko-multilingual@latest", - "name": "textembedding-gecko-multilingual@latest" } + ], + "regions": [ + { "label": "us-east1", "name": "us-east1" }, + { "label": "us-east4", "name": "us-east4" }, + { "label": "us-central1", "name": "us-central1" }, + { "label": "us-west1", "name": "us-west1" }, + { "label": "europe-west4", "name": "europe-west4" }, + { "label": "europe-west1", "name": "europe-west1" }, + { "label": "europe-west3", "name": "europe-west3" }, + { "label": "europe-west2", "name": "europe-west2" }, + { "label": "asia-east1", "name": "asia-east1" }, + { "label": "asia-southeast1", "name": "asia-southeast1" }, + { "label": "asia-northeast1", "name": "asia-northeast1" }, + { "label": "asia-south1", "name": "asia-south1" }, + { "label": "australia-southeast1", "name": "australia-southeast1" }, + { "label": "southamerica-east1", "name": "southamerica-east1" }, + { "label": "africa-south1", "name": "africa-south1" }, + { "label": "asia-east2", "name": "asia-east2" }, + { "label": "asia-northeast2", "name": "asia-northeast2" }, + { "label": "asia-northeast3", "name": "asia-northeast3" }, + { "label": "asia-south2", "name": "asia-south2" }, + { "label": "asia-southeast2", "name": "asia-southeast2" }, + { "label": "australia-southeast2", "name": "australia-southeast2" }, + { "label": "europe-central2", "name": "europe-central2" }, + { "label": "europe-north1", "name": "europe-north1" }, + { "label": "europe-north2", "name": "europe-north2" }, + { "label": "europe-southwest1", "name": "europe-southwest1" }, + { "label": "europe-west10", "name": "europe-west10" }, + { "label": "europe-west12", "name": "europe-west12" }, + { "label": "europe-west6", "name": "europe-west6" }, + { "label": "europe-west8", "name": "europe-west8" }, + { "label": "europe-west9", "name": "europe-west9" }, + { "label": "me-central1", "name": "me-central1" }, + { "label": "me-central2", "name": "me-central2" }, + { "label": "me-west1", "name": "me-west1" }, + { "label": 
"northamerica-northeast1", "name": "northamerica-northeast1" }, + { "label": "northamerica-northeast2", "name": "northamerica-northeast2" }, + { "label": "northamerica-south1", "name": "northamerica-south1" }, + { "label": "southamerica-west1", "name": "southamerica-west1" }, + { "label": "us-east5", "name": "us-east5" }, + { "label": "us-south1", "name": "us-south1" }, + { "label": "us-west2", "name": "us-west2" }, + { "label": "us-west3", "name": "us-west3" }, + { "label": "us-west4", "name": "us-west4" } ] }, { diff --git a/packages/components/nodes/agentflow/Agent/Agent.ts b/packages/components/nodes/agentflow/Agent/Agent.ts index 849a2e3e5..b8aa80222 100644 --- a/packages/components/nodes/agentflow/Agent/Agent.ts +++ b/packages/components/nodes/agentflow/Agent/Agent.ts @@ -3,6 +3,7 @@ import { ICommonObject, IDatabaseEntity, IHumanInput, + IMessage, INode, INodeData, INodeOptionsValue, @@ -15,7 +16,7 @@ import { AnalyticHandler } from '../../../src/handler' import { DEFAULT_SUMMARIZER_TEMPLATE } from '../prompt' import { ILLMMessage } from '../Interface.Agentflow' import { Tool } from '@langchain/core/tools' -import { ARTIFACTS_PREFIX, SOURCE_DOCUMENTS_PREFIX } from '../../../src/agents' +import { ARTIFACTS_PREFIX, SOURCE_DOCUMENTS_PREFIX, TOOL_ARGS_PREFIX } from '../../../src/agents' import { flatten } from 'lodash' import zodToJsonSchema from 'zod-to-json-schema' import { getErrorMessage } from '../../../src/error' @@ -27,6 +28,15 @@ import { replaceBase64ImagesWithFileReferences, updateFlowState } from '../utils' +import { + convertMultiOptionsToStringArray, + getCredentialData, + getCredentialParam, + processTemplateVariables, + configureStructuredOutput +} from '../../../src/utils' +import { addSingleFileToStorage } from '../../../src/storageUtils' +import fetch from 'node-fetch' interface ITool { agentSelectedTool: string @@ -77,7 +87,7 @@ class Agent_Agentflow implements INode { constructor() { this.label = 'Agent' this.name = 'agentAgentflow' - 
this.version = 1.0 + this.version = 2.2 this.type = 'Agent' this.category = 'Agent Flows' this.description = 'Dynamically choose and utilize tools during runtime, enabling multi-step reasoning' @@ -131,6 +141,82 @@ class Agent_Agentflow implements INode { } ] }, + { + label: 'OpenAI Built-in Tools', + name: 'agentToolsBuiltInOpenAI', + type: 'multiOptions', + optional: true, + options: [ + { + label: 'Web Search', + name: 'web_search_preview', + description: 'Search the web for the latest information' + }, + { + label: 'Code Interpreter', + name: 'code_interpreter', + description: 'Write and run Python code in a sandboxed environment' + }, + { + label: 'Image Generation', + name: 'image_generation', + description: 'Generate images based on a text prompt' + } + ], + show: { + agentModel: 'chatOpenAI' + } + }, + { + label: 'Gemini Built-in Tools', + name: 'agentToolsBuiltInGemini', + type: 'multiOptions', + optional: true, + options: [ + { + label: 'URL Context', + name: 'urlContext', + description: 'Extract content from given URLs' + }, + { + label: 'Google Search', + name: 'googleSearch', + description: 'Search real-time web content' + } + ], + show: { + agentModel: 'chatGoogleGenerativeAI' + } + }, + { + label: 'Anthropic Built-in Tools', + name: 'agentToolsBuiltInAnthropic', + type: 'multiOptions', + optional: true, + options: [ + { + label: 'Web Search', + name: 'web_search_20250305', + description: 'Search the web for the latest information' + }, + { + label: 'Web Fetch', + name: 'web_fetch_20250910', + description: 'Retrieve full content from specified web pages' + } + /* + * Not supported yet as we need to get bash_code_execution_tool_result from content: + https://docs.claude.com/en/docs/agents-and-tools/tool-use/code-execution-tool#retrieve-generated-files + { + label: 'Code Interpreter', + name: 'code_execution_20250825', + description: 'Write and run Python code in a sandboxed environment' + }*/ + ], + show: { + agentModel: 'chatAnthropic' + } + }, { 
label: 'Tools', name: 'agentTools', @@ -314,6 +400,108 @@ class Agent_Agentflow implements INode { ], default: 'userMessage' }, + { + label: 'JSON Structured Output', + name: 'agentStructuredOutput', + description: 'Instruct the Agent to give output in a JSON structured schema', + type: 'array', + optional: true, + acceptVariable: true, + array: [ + { + label: 'Key', + name: 'key', + type: 'string' + }, + { + label: 'Type', + name: 'type', + type: 'options', + options: [ + { + label: 'String', + name: 'string' + }, + { + label: 'String Array', + name: 'stringArray' + }, + { + label: 'Number', + name: 'number' + }, + { + label: 'Boolean', + name: 'boolean' + }, + { + label: 'Enum', + name: 'enum' + }, + { + label: 'JSON Array', + name: 'jsonArray' + } + ] + }, + { + label: 'Enum Values', + name: 'enumValues', + type: 'string', + placeholder: 'value1, value2, value3', + description: 'Enum values. Separated by comma', + optional: true, + show: { + 'agentStructuredOutput[$index].type': 'enum' + } + }, + { + label: 'JSON Schema', + name: 'jsonSchema', + type: 'code', + placeholder: `{ + "answer": { + "type": "string", + "description": "Value of the answer" + }, + "reason": { + "type": "string", + "description": "Reason for the answer" + }, + "optional": { + "type": "boolean" + }, + "count": { + "type": "number" + }, + "children": { + "type": "array", + "items": { + "type": "object", + "properties": { + "value": { + "type": "string", + "description": "Value of the children's answer" + } + } + } + } +}`, + description: 'JSON schema for the structured output', + optional: true, + hideCodeExecute: true, + show: { + 'agentStructuredOutput[$index].type': 'jsonArray' + } + }, + { + label: 'Description', + name: 'description', + type: 'string', + placeholder: 'Description of the key' + } + ] + }, { label: 'Update Flow State', name: 'agentUpdateState', @@ -427,7 +615,8 @@ class Agent_Agentflow implements INode { return returnData } - const stores = await 
appDataSource.getRepository(databaseEntities['DocumentStore']).find() + const searchOptions = options.searchOptions || {} + const stores = await appDataSource.getRepository(databaseEntities['DocumentStore']).findBy(searchOptions) for (const store of stores) { if (store.status === 'UPSERTED') { const obj = { @@ -495,18 +684,21 @@ class Agent_Agentflow implements INode { } } const toolInstance = await newToolNodeInstance.init(newNodeData, '', options) - if (tool.agentSelectedToolRequiresHumanInput) { - toolInstance.requiresHumanInput = true - } // toolInstance might returns a list of tools like MCP tools if (Array.isArray(toolInstance)) { for (const subTool of toolInstance) { const subToolInstance = subTool as Tool ;(subToolInstance as any).agentSelectedTool = tool.agentSelectedTool + if (tool.agentSelectedToolRequiresHumanInput) { + ;(subToolInstance as any).requiresHumanInput = true + } toolsInstance.push(subToolInstance) } } else { + if (tool.agentSelectedToolRequiresHumanInput) { + toolInstance.requiresHumanInput = true + } toolsInstance.push(toolInstance as Tool) } } @@ -519,7 +711,7 @@ class Agent_Agentflow implements INode { } const componentNode = options.componentNodes[agentSelectedTool] - const jsonSchema = zodToJsonSchema(tool.schema) + const jsonSchema = zodToJsonSchema(tool.schema as any) if (jsonSchema.$schema) { delete jsonSchema.$schema } @@ -686,12 +878,14 @@ class Agent_Agentflow implements INode { const memoryType = nodeData.inputs?.agentMemoryType as string const userMessage = nodeData.inputs?.agentUserMessage as string const _agentUpdateState = nodeData.inputs?.agentUpdateState + const _agentStructuredOutput = nodeData.inputs?.agentStructuredOutput const agentMessages = (nodeData.inputs?.agentMessages as unknown as ILLMMessage[]) ?? [] // Extract runtime state and history const state = options.agentflowRuntime?.state as ICommonObject const pastChatHistory = (options.pastChatHistory as BaseMessageLike[]) ?? 
[] const runtimeChatHistory = (options.agentflowRuntime?.chatHistory as BaseMessageLike[]) ?? [] + const prependedChatHistory = options.prependedChatHistory as IMessage[] const chatId = options.chatId as string // Initialize the LLM model instance @@ -710,6 +904,82 @@ class Agent_Agentflow implements INode { const llmWithoutToolsBind = (await newLLMNodeInstance.init(newNodeData, '', options)) as BaseChatModel let llmNodeInstance = llmWithoutToolsBind + const isStructuredOutput = _agentStructuredOutput && Array.isArray(_agentStructuredOutput) && _agentStructuredOutput.length > 0 + + const agentToolsBuiltInOpenAI = convertMultiOptionsToStringArray(nodeData.inputs?.agentToolsBuiltInOpenAI) + if (agentToolsBuiltInOpenAI && agentToolsBuiltInOpenAI.length > 0) { + for (const tool of agentToolsBuiltInOpenAI) { + const builtInTool: ICommonObject = { + type: tool + } + if (tool === 'code_interpreter') { + builtInTool.container = { type: 'auto' } + } + ;(toolsInstance as any).push(builtInTool) + ;(availableTools as any).push({ + name: tool, + toolNode: { + label: tool, + name: tool + } + }) + } + } + + const agentToolsBuiltInGemini = convertMultiOptionsToStringArray(nodeData.inputs?.agentToolsBuiltInGemini) + if (agentToolsBuiltInGemini && agentToolsBuiltInGemini.length > 0) { + for (const tool of agentToolsBuiltInGemini) { + const builtInTool: ICommonObject = { + [tool]: {} + } + ;(toolsInstance as any).push(builtInTool) + ;(availableTools as any).push({ + name: tool, + toolNode: { + label: tool, + name: tool + } + }) + } + } + + const agentToolsBuiltInAnthropic = convertMultiOptionsToStringArray(nodeData.inputs?.agentToolsBuiltInAnthropic) + if (agentToolsBuiltInAnthropic && agentToolsBuiltInAnthropic.length > 0) { + for (const tool of agentToolsBuiltInAnthropic) { + // split _ to get the tool name by removing the last part (date) + const toolName = tool.split('_').slice(0, -1).join('_') + + if (tool === 'code_execution_20250825') { + ;(llmNodeInstance as 
any).clientOptions = { + defaultHeaders: { + 'anthropic-beta': ['code-execution-2025-08-25', 'files-api-2025-04-14'] + } + } + } + + if (tool === 'web_fetch_20250910') { + ;(llmNodeInstance as any).clientOptions = { + defaultHeaders: { + 'anthropic-beta': ['web-fetch-2025-09-10'] + } + } + } + + const builtInTool: ICommonObject = { + type: tool, + name: toolName + } + ;(toolsInstance as any).push(builtInTool) + ;(availableTools as any).push({ + name: tool, + toolNode: { + label: tool, + name: tool + } + }) + } + } + if (llmNodeInstance && toolsInstance.length > 0) { if (llmNodeInstance.bindTools === undefined) { throw new Error(`Agent needs to have a function calling capable models.`) @@ -726,11 +996,27 @@ class Agent_Agentflow implements INode { // Use to keep track of past messages with image file references let pastImageMessagesWithFileRef: BaseMessageLike[] = [] + // Prepend history ONLY if it is the first node + if (prependedChatHistory.length > 0 && !runtimeChatHistory.length) { + for (const msg of prependedChatHistory) { + const role: string = msg.role === 'apiMessage' ? 'assistant' : 'user' + const content: string = msg.content ?? 
'' + messages.push({ + role, + content + }) + } + } + for (const msg of agentMessages) { const role = msg.role const content = msg.content if (role && content) { - messages.push({ role, content }) + if (role === 'system') { + messages.unshift({ role, content }) + } else { + messages.push({ role, content }) + } } } @@ -755,7 +1041,7 @@ class Agent_Agentflow implements INode { /* * If this is the first node: * - Add images to messages if exist - * - Add user message + * - Add user message if it does not exist in the agentMessages array */ if (options.uploads) { const imageContents = await getUniqueImageMessages(options, messages, modelConfig) @@ -766,7 +1052,7 @@ class Agent_Agentflow implements INode { } } - if (input && typeof input === 'string') { + if (input && typeof input === 'string' && !agentMessages.some((msg) => msg.role === 'user')) { messages.push({ role: 'user', content: input @@ -778,7 +1064,7 @@ class Agent_Agentflow implements INode { // Initialize response and determine if streaming is possible let response: AIMessageChunk = new AIMessageChunk('') const isLastNode = options.isLastNode as boolean - const isStreamable = isLastNode && options.sseStreamer !== undefined && modelConfig?.streaming !== false + const isStreamable = isLastNode && options.sseStreamer !== undefined && modelConfig?.streaming !== false && !isStructuredOutput // Start analytics if (analyticHandlers && options.parentTraceIds) { @@ -796,6 +1082,7 @@ class Agent_Agentflow implements INode { let usedTools: IUsedTool[] = [] let sourceDocuments: Array = [] let artifacts: any[] = [] + let fileAnnotations: any[] = [] let additionalTokens = 0 let isWaitingForHumanInput = false @@ -826,7 +1113,8 @@ class Agent_Agentflow implements INode { llmWithoutToolsBind, isStreamable, isLastNode, - iterationContext + iterationContext, + isStructuredOutput }) response = result.response @@ -855,12 +1143,22 @@ class Agent_Agentflow implements INode { } } else { if (isStreamable) { - response = await 
this.handleStreamingResponse(sseStreamer, llmNodeInstance, messages, chatId, abortController) + response = await this.handleStreamingResponse( + sseStreamer, + llmNodeInstance, + messages, + chatId, + abortController, + isStructuredOutput + ) } else { response = await llmNodeInstance.invoke(messages, { signal: abortController?.signal }) } } + // Address built in tools (after artifacts are processed) + const builtInUsedTools: IUsedTool[] = await this.extractBuiltInUsedTools(response, []) + if (!humanInput && response.tool_calls && response.tool_calls.length > 0) { const result = await this.handleToolCalls({ response, @@ -874,7 +1172,8 @@ class Agent_Agentflow implements INode { llmNodeInstance, isStreamable, isLastNode, - iterationContext + iterationContext, + isStructuredOutput }) response = result.response @@ -901,13 +1200,18 @@ class Agent_Agentflow implements INode { sseStreamer.streamArtifactsEvent(chatId, flatten(artifacts)) } } - } else if (!humanInput && !isStreamable && isLastNode && sseStreamer) { + } else if (!humanInput && !isStreamable && isLastNode && sseStreamer && !isStructuredOutput) { // Stream whole response back to UI if not streaming and no tool calls - let responseContent = JSON.stringify(response, null, 2) - if (typeof response.content === 'string') { - responseContent = response.content + // Skip this if structured output is enabled - it will be streamed after conversion + let finalResponse = '' + if (response.content && Array.isArray(response.content)) { + finalResponse = response.content.map((item: any) => item.text).join('\n') + } else if (response.content && typeof response.content === 'string') { + finalResponse = response.content + } else { + finalResponse = JSON.stringify(response, null, 2) } - sseStreamer.streamTokenEvent(chatId, responseContent) + sseStreamer.streamTokenEvent(chatId, finalResponse) } // Calculate execution time @@ -928,7 +1232,71 @@ class Agent_Agentflow implements INode { } // Prepare final response and output 
object - const finalResponse = (response.content as string) ?? JSON.stringify(response, null, 2) + let finalResponse = '' + if (response.content && Array.isArray(response.content)) { + finalResponse = response.content.map((item: any) => item.text).join('\n') + } else if (response.content && typeof response.content === 'string') { + finalResponse = response.content + } else { + finalResponse = JSON.stringify(response, null, 2) + } + + // Address built in tools + const additionalBuiltInUsedTools: IUsedTool[] = await this.extractBuiltInUsedTools(response, builtInUsedTools) + if (additionalBuiltInUsedTools.length > 0) { + usedTools = [...new Set([...usedTools, ...additionalBuiltInUsedTools])] + + // Stream used tools if this is the last node + if (isLastNode && sseStreamer) { + sseStreamer.streamUsedToolsEvent(chatId, flatten(usedTools)) + } + } + + // Extract artifacts from annotations in response metadata + if (response.response_metadata) { + const { artifacts: extractedArtifacts, fileAnnotations: extractedFileAnnotations } = + await this.extractArtifactsFromResponse(response.response_metadata, newNodeData, options) + if (extractedArtifacts.length > 0) { + artifacts = [...artifacts, ...extractedArtifacts] + + // Stream artifacts if this is the last node + if (isLastNode && sseStreamer) { + sseStreamer.streamArtifactsEvent(chatId, extractedArtifacts) + } + } + + if (extractedFileAnnotations.length > 0) { + fileAnnotations = [...fileAnnotations, ...extractedFileAnnotations] + + // Stream file annotations if this is the last node + if (isLastNode && sseStreamer) { + sseStreamer.streamFileAnnotationsEvent(chatId, fileAnnotations) + } + } + } + + // Replace sandbox links with proper download URLs. 
Example: [Download the script](sandbox:/mnt/data/dummy_bar_graph.py) + if (finalResponse.includes('sandbox:/')) { + finalResponse = await this.processSandboxLinks(finalResponse, options.baseURL, options.chatflowid, chatId) + } + + // If is structured output, then invoke LLM again with structured output at the very end after all tool calls + if (isStructuredOutput) { + llmNodeInstance = configureStructuredOutput(llmNodeInstance, _agentStructuredOutput) + const prompt = 'Convert the following response to the structured output format: ' + finalResponse + response = await llmNodeInstance.invoke(prompt, { signal: abortController?.signal }) + + if (typeof response === 'object') { + finalResponse = '```json\n' + JSON.stringify(response, null, 2) + '\n```' + } else { + finalResponse = response + } + + if (isLastNode && sseStreamer) { + sseStreamer.streamTokenEvent(chatId, finalResponse) + } + } + const output = this.prepareOutputObject( response, availableTools, @@ -940,7 +1308,9 @@ class Agent_Agentflow implements INode { sourceDocuments, artifacts, additionalTokens, - isWaitingForHumanInput + isWaitingForHumanInput, + fileAnnotations, + isStructuredOutput ) // End analytics tracking @@ -953,15 +1323,14 @@ class Agent_Agentflow implements INode { this.sendStreamingEvents(options, chatId, response) } - // Process template variables in state - if (newState && Object.keys(newState).length > 0) { - for (const key in newState) { - if (newState[key].toString().includes('{{ output }}')) { - newState[key] = finalResponse - } - } + // Stream file annotations if any were extracted + if (fileAnnotations.length > 0 && isLastNode && sseStreamer) { + sseStreamer.streamFileAnnotationsEvent(chatId, fileAnnotations) } + // Process template variables in state + newState = processTemplateVariables(newState, finalResponse) + // Replace the actual messages array with one that includes the file references for images instead of base64 data const messagesWithFileReferences = 
replaceBase64ImagesWithFileReferences( messages, @@ -976,7 +1345,19 @@ class Agent_Agentflow implements INode { inputMessages.push(...runtimeImageMessagesWithFileRef) } if (input && typeof input === 'string') { - inputMessages.push({ role: 'user', content: input }) + if (!enableMemory) { + if (!agentMessages.some((msg) => msg.role === 'user')) { + inputMessages.push({ role: 'user', content: input }) + } else { + agentMessages.map((msg) => { + if (msg.role === 'user') { + inputMessages.push({ role: 'user', content: msg.content }) + } + }) + } + } else { + inputMessages.push({ role: 'user', content: input }) + } } } @@ -1006,7 +1387,16 @@ class Agent_Agentflow implements INode { { role: returnRole, content: finalResponse, - name: nodeData?.label ? nodeData?.label.toLowerCase().replace(/\s/g, '_').trim() : nodeData?.id + name: nodeData?.label ? nodeData?.label.toLowerCase().replace(/\s/g, '_').trim() : nodeData?.id, + ...(((artifacts && artifacts.length > 0) || + (fileAnnotations && fileAnnotations.length > 0) || + (usedTools && usedTools.length > 0)) && { + additional_kwargs: { + ...(artifacts && artifacts.length > 0 && { artifacts }), + ...(fileAnnotations && fileAnnotations.length > 0 && { fileAnnotations }), + ...(usedTools && usedTools.length > 0 && { usedTools }) + } + }) } ] } @@ -1022,6 +1412,132 @@ class Agent_Agentflow implements INode { } } + /** + * Extracts built-in used tools from response metadata and processes image generation results + */ + private async extractBuiltInUsedTools(response: AIMessageChunk, builtInUsedTools: IUsedTool[] = []): Promise { + if (!response.response_metadata) { + return builtInUsedTools + } + + const { output, tools, groundingMetadata, urlContextMetadata } = response.response_metadata + + // Handle OpenAI built-in tools + if (output && Array.isArray(output) && output.length > 0 && tools && Array.isArray(tools) && tools.length > 0) { + for (const outputItem of output) { + if (outputItem.type && 
outputItem.type.endsWith('_call')) { + let toolInput = outputItem.action ?? outputItem.code + let toolOutput = outputItem.status === 'completed' ? 'Success' : outputItem.status + + // Handle image generation calls specially + if (outputItem.type === 'image_generation_call') { + // Create input summary for image generation + toolInput = { + prompt: outputItem.revised_prompt || 'Image generation request', + size: outputItem.size || '1024x1024', + quality: outputItem.quality || 'standard', + output_format: outputItem.output_format || 'png' + } + + // Check if image has been processed (base64 replaced with file path) + if (outputItem.result && !outputItem.result.startsWith('data:') && !outputItem.result.includes('base64')) { + toolOutput = `Image generated and saved` + } else { + toolOutput = `Image generated (base64)` + } + } + + // Remove "_call" suffix to get the base tool name + const baseToolName = outputItem.type.replace('_call', '') + + // Find matching tool that includes the base name in its type + const matchingTool = tools.find((tool) => tool.type && tool.type.includes(baseToolName)) + + if (matchingTool) { + // Check for duplicates + if (builtInUsedTools.find((tool) => tool.tool === matchingTool.type)) { + continue + } + + builtInUsedTools.push({ + tool: matchingTool.type, + toolInput, + toolOutput + }) + } + } + } + } + + // Handle Gemini googleSearch tool + if (groundingMetadata && groundingMetadata.webSearchQueries && Array.isArray(groundingMetadata.webSearchQueries)) { + // Check for duplicates + if (!builtInUsedTools.find((tool) => tool.tool === 'googleSearch')) { + builtInUsedTools.push({ + tool: 'googleSearch', + toolInput: { + queries: groundingMetadata.webSearchQueries + }, + toolOutput: `Searched for: ${groundingMetadata.webSearchQueries.join(', ')}` + }) + } + } + + // Handle Gemini urlContext tool + if (urlContextMetadata && urlContextMetadata.urlMetadata && Array.isArray(urlContextMetadata.urlMetadata)) { + // Check for duplicates + if 
(!builtInUsedTools.find((tool) => tool.tool === 'urlContext')) { + builtInUsedTools.push({ + tool: 'urlContext', + toolInput: { + urlMetadata: urlContextMetadata.urlMetadata + }, + toolOutput: `Processed ${urlContextMetadata.urlMetadata.length} URL(s)` + }) + } + } + + return builtInUsedTools + } + + /** + * Saves base64 image data to storage and returns file information + */ + private async saveBase64Image( + outputItem: any, + options: ICommonObject + ): Promise<{ filePath: string; fileName: string; totalSize: number } | null> { + try { + if (!outputItem.result) { + return null + } + + // Extract base64 data and create buffer + const base64Data = outputItem.result + const imageBuffer = Buffer.from(base64Data, 'base64') + + // Determine file extension and MIME type + const outputFormat = outputItem.output_format || 'png' + const fileName = `generated_image_${outputItem.id || Date.now()}.${outputFormat}` + const mimeType = outputFormat === 'png' ? 'image/png' : 'image/jpeg' + + // Save the image using the existing storage utility + const { path, totalSize } = await addSingleFileToStorage( + mimeType, + imageBuffer, + fileName, + options.orgId, + options.chatflowid, + options.chatId + ) + + return { filePath: path, fileName, totalSize } + } catch (error) { + console.error('Error saving base64 image:', error) + return null + } + } + /** * Handles memory management based on the specified memory type */ @@ -1184,24 +1700,29 @@ class Agent_Agentflow implements INode { llmNodeInstance: BaseChatModel, messages: BaseMessageLike[], chatId: string, - abortController: AbortController + abortController: AbortController, + isStructuredOutput: boolean = false ): Promise { let response = new AIMessageChunk('') try { for await (const chunk of await llmNodeInstance.stream(messages, { signal: abortController?.signal })) { - if (sseStreamer) { + if (sseStreamer && !isStructuredOutput) { let content = '' - if (Array.isArray(chunk.content) && chunk.content.length > 0) { + + if (typeof 
chunk === 'string') { + content = chunk + } else if (Array.isArray(chunk.content) && chunk.content.length > 0) { const contents = chunk.content as MessageContentText[] content = contents.map((item) => item.text).join('') - } else { + } else if (chunk.content) { content = chunk.content.toString() } sseStreamer.streamTokenEvent(chatId, content) } - response = response.concat(chunk) + const messageChunk = typeof chunk === 'string' ? new AIMessageChunk(chunk) : chunk + response = response.concat(messageChunk) } } catch (error) { console.error('Error during streaming:', error) @@ -1228,7 +1749,9 @@ class Agent_Agentflow implements INode { sourceDocuments: Array, artifacts: any[], additionalTokens: number = 0, - isWaitingForHumanInput: boolean = false + isWaitingForHumanInput: boolean = false, + fileAnnotations: any[] = [], + isStructuredOutput: boolean = false ): any { const output: any = { content: finalResponse, @@ -1259,6 +1782,19 @@ class Agent_Agentflow implements INode { } } + if (response.response_metadata) { + output.responseMetadata = response.response_metadata + } + + if (isStructuredOutput && typeof response === 'object') { + const structuredOutput = response as Record + for (const key in structuredOutput) { + if (structuredOutput[key] !== undefined && structuredOutput[key] !== null) { + output[key] = structuredOutput[key] + } + } + } + // Add used tools, source documents and artifacts to output if (usedTools && usedTools.length > 0) { output.usedTools = flatten(usedTools) @@ -1280,6 +1816,10 @@ class Agent_Agentflow implements INode { output.isWaitingForHumanInput = isWaitingForHumanInput } + if (fileAnnotations && fileAnnotations.length > 0) { + output.fileAnnotations = fileAnnotations + } + return output } @@ -1290,7 +1830,12 @@ class Agent_Agentflow implements INode { const sseStreamer: IServerSideEventStreamer = options.sseStreamer as IServerSideEventStreamer if (response.tool_calls) { - sseStreamer.streamCalledToolsEvent(chatId, response.tool_calls) + 
const formattedToolCalls = response.tool_calls.map((toolCall: any) => ({ + tool: toolCall.name || 'tool', + toolInput: toolCall.args, + toolOutput: '' + })) + sseStreamer.streamCalledToolsEvent(chatId, flatten(formattedToolCalls)) } if (response.usage_metadata) { @@ -1315,7 +1860,8 @@ class Agent_Agentflow implements INode { llmNodeInstance, isStreamable, isLastNode, - iterationContext + iterationContext, + isStructuredOutput = false }: { response: AIMessageChunk messages: BaseMessageLike[] @@ -1329,6 +1875,7 @@ class Agent_Agentflow implements INode { isStreamable: boolean isLastNode: boolean iterationContext: ICommonObject + isStructuredOutput?: boolean }): Promise<{ response: AIMessageChunk usedTools: IUsedTool[] @@ -1339,6 +1886,10 @@ class Agent_Agentflow implements INode { }> { // Track total tokens used throughout this process let totalTokens = response.usage_metadata?.total_tokens || 0 + const usedTools: IUsedTool[] = [] + let sourceDocuments: Array = [] + let artifacts: any[] = [] + let isWaitingForHumanInput: boolean | undefined if (!response.tool_calls || response.tool_calls.length === 0) { return { response, usedTools: [], sourceDocuments: [], artifacts: [], totalTokens } @@ -1346,7 +1897,30 @@ class Agent_Agentflow implements INode { // Stream tool calls if available if (sseStreamer) { - sseStreamer.streamCalledToolsEvent(chatId, JSON.stringify(response.tool_calls)) + const formattedToolCalls = response.tool_calls.map((toolCall: any) => ({ + tool: toolCall.name || 'tool', + toolInput: toolCall.args, + toolOutput: '' + })) + sseStreamer.streamCalledToolsEvent(chatId, flatten(formattedToolCalls)) + } + + // Remove tool calls with no id + const toBeRemovedToolCalls = [] + for (let i = 0; i < response.tool_calls.length; i++) { + const toolCall = response.tool_calls[i] + if (!toolCall.id) { + toBeRemovedToolCalls.push(toolCall) + usedTools.push({ + tool: toolCall.name || 'tool', + toolInput: toolCall.args, + toolOutput: response.content + }) + } + } + + for 
(const toolCall of toBeRemovedToolCalls) { + response.tool_calls.splice(response.tool_calls.indexOf(toolCall), 1) } // Add LLM response with tool calls to messages @@ -1358,10 +1932,6 @@ class Agent_Agentflow implements INode { usage_metadata: response.usage_metadata }) - const usedTools: IUsedTool[] = [] - let sourceDocuments: Array = [] - let artifacts: any[] = [] - // Process each tool call for (let i = 0; i < response.tool_calls.length; i++) { const toolCall = response.tool_calls[i] @@ -1374,6 +1944,7 @@ class Agent_Agentflow implements INode { (selectedTool as any).requiresHumanInput && (!iterationContext || Object.keys(iterationContext).length === 0) const flowConfig = { + chatflowId: options.chatflowid, sessionId: options.sessionId, chatId: options.chatId, input: input, @@ -1384,14 +1955,25 @@ class Agent_Agentflow implements INode { const toolCallDetails = '```json\n' + JSON.stringify(toolCall, null, 2) + '\n```' const responseContent = response.content + `\nAttempting to use tool:\n${toolCallDetails}` response.content = responseContent - sseStreamer?.streamTokenEvent(chatId, responseContent) + if (!isStructuredOutput) { + sseStreamer?.streamTokenEvent(chatId, responseContent) + } return { response, usedTools, sourceDocuments, artifacts, totalTokens, isWaitingForHumanInput: true } } + let toolIds: ICommonObject | undefined + if (options.analyticHandlers) { + toolIds = await options.analyticHandlers.onToolStart(toolCall.name, toolCall.args, options.parentTraceIds) + } + try { //@ts-ignore let toolOutput = await selectedTool.call(toolCall.args, { signal: abortController?.signal }, undefined, flowConfig) + if (options.analyticHandlers && toolIds) { + await options.analyticHandlers.onToolEnd(toolIds, toolOutput) + } + // Extract source documents if present if (typeof toolOutput === 'string' && toolOutput.includes(SOURCE_DOCUMENTS_PREFIX)) { const [output, docs] = toolOutput.split(SOURCE_DOCUMENTS_PREFIX) @@ -1416,6 +1998,17 @@ class Agent_Agentflow implements 
INode { } } + let toolInput + if (typeof toolOutput === 'string' && toolOutput.includes(TOOL_ARGS_PREFIX)) { + const [output, args] = toolOutput.split(TOOL_ARGS_PREFIX) + toolOutput = output + try { + toolInput = JSON.parse(args) + } catch (e) { + console.error('Error parsing tool input from tool:', e) + } + } + // Add tool message to conversation messages.push({ role: 'tool', @@ -1431,14 +2024,29 @@ class Agent_Agentflow implements INode { // Track used tools usedTools.push({ tool: toolCall.name, - toolInput: toolCall.args, + toolInput: toolInput ?? toolCall.args, toolOutput }) } catch (e) { + if (options.analyticHandlers && toolIds) { + await options.analyticHandlers.onToolEnd(toolIds, e) + } + console.error('Error invoking tool:', e) + const errMsg = getErrorMessage(e) + let toolInput = toolCall.args + if (typeof errMsg === 'string' && errMsg.includes(TOOL_ARGS_PREFIX)) { + const [_, args] = errMsg.split(TOOL_ARGS_PREFIX) + try { + toolInput = JSON.parse(args) + } catch (e) { + console.error('Error parsing tool input from tool:', e) + } + } + usedTools.push({ tool: selectedTool.name, - toolInput: toolCall.args, + toolInput, toolOutput: '', error: getErrorMessage(e) }) @@ -1455,7 +2063,7 @@ class Agent_Agentflow implements INode { const lastToolOutput = usedTools[0]?.toolOutput || '' const lastToolOutputString = typeof lastToolOutput === 'string' ? lastToolOutput : JSON.stringify(lastToolOutput, null, 2) - if (sseStreamer) { + if (sseStreamer && !isStructuredOutput) { sseStreamer.streamTokenEvent(chatId, lastToolOutputString) } @@ -1469,264 +2077,34 @@ class Agent_Agentflow implements INode { } } + if (response.tool_calls.length === 0) { + const responseContent = typeof response.content === 'string' ? 
response.content : JSON.stringify(response.content, null, 2) + return { + response: new AIMessageChunk(responseContent), + usedTools, + sourceDocuments, + artifacts, + totalTokens + } + } + // Get LLM response after tool calls let newResponse: AIMessageChunk if (isStreamable) { - newResponse = await this.handleStreamingResponse(sseStreamer, llmNodeInstance, messages, chatId, abortController) - } else { - newResponse = await llmNodeInstance.invoke(messages, { signal: abortController?.signal }) - - // Stream non-streaming response if this is the last node - if (isLastNode && sseStreamer) { - let responseContent = JSON.stringify(newResponse, null, 2) - if (typeof newResponse.content === 'string') { - responseContent = newResponse.content - } - sseStreamer.streamTokenEvent(chatId, responseContent) - } - } - - // Add tokens from this response - if (newResponse.usage_metadata?.total_tokens) { - totalTokens += newResponse.usage_metadata.total_tokens - } - - // Check for recursive tool calls and handle them - if (newResponse.tool_calls && newResponse.tool_calls.length > 0) { - const { - response: recursiveResponse, - usedTools: recursiveUsedTools, - sourceDocuments: recursiveSourceDocuments, - artifacts: recursiveArtifacts, - totalTokens: recursiveTokens - } = await this.handleToolCalls({ - response: newResponse, - messages, - toolsInstance, + newResponse = await this.handleStreamingResponse( sseStreamer, - chatId, - input, - options, - abortController, llmNodeInstance, - isStreamable, - isLastNode, - iterationContext - }) - - // Merge results from recursive tool calls - newResponse = recursiveResponse - usedTools.push(...recursiveUsedTools) - sourceDocuments = [...sourceDocuments, ...recursiveSourceDocuments] - artifacts = [...artifacts, ...recursiveArtifacts] - totalTokens += recursiveTokens - } - - return { response: newResponse, usedTools, sourceDocuments, artifacts, totalTokens } - } - - /** - * Handles tool calls and their responses, with support for recursive tool 
calling - */ - private async handleResumedToolCalls({ - humanInput, - humanInputAction, - messages, - toolsInstance, - sseStreamer, - chatId, - input, - options, - abortController, - llmWithoutToolsBind, - isStreamable, - isLastNode, - iterationContext - }: { - humanInput: IHumanInput - humanInputAction: Record | undefined - messages: BaseMessageLike[] - toolsInstance: Tool[] - sseStreamer: IServerSideEventStreamer | undefined - chatId: string - input: string | Record - options: ICommonObject - abortController: AbortController - llmWithoutToolsBind: BaseChatModel - isStreamable: boolean - isLastNode: boolean - iterationContext: ICommonObject - }): Promise<{ - response: AIMessageChunk - usedTools: IUsedTool[] - sourceDocuments: Array - artifacts: any[] - totalTokens: number - isWaitingForHumanInput?: boolean - }> { - let llmNodeInstance = llmWithoutToolsBind - - const lastCheckpointMessages = humanInputAction?.data?.input?.messages ?? [] - if (!lastCheckpointMessages.length) { - return { response: new AIMessageChunk(''), usedTools: [], sourceDocuments: [], artifacts: [], totalTokens: 0 } - } - - // Use the last message as the response - const response = lastCheckpointMessages[lastCheckpointMessages.length - 1] as AIMessageChunk - - // Replace messages array - messages.length = 0 - messages.push(...lastCheckpointMessages.slice(0, lastCheckpointMessages.length - 1)) - - // Track total tokens used throughout this process - let totalTokens = response.usage_metadata?.total_tokens || 0 - - if (!response.tool_calls || response.tool_calls.length === 0) { - return { response, usedTools: [], sourceDocuments: [], artifacts: [], totalTokens } - } - - // Stream tool calls if available - if (sseStreamer) { - sseStreamer.streamCalledToolsEvent(chatId, JSON.stringify(response.tool_calls)) - } - - // Add LLM response with tool calls to messages - messages.push({ - id: response.id, - role: 'assistant', - content: response.content, - tool_calls: response.tool_calls, - usage_metadata: 
response.usage_metadata - }) - - const usedTools: IUsedTool[] = [] - let sourceDocuments: Array = [] - let artifacts: any[] = [] - let isWaitingForHumanInput: boolean | undefined - - // Process each tool call - for (let i = 0; i < response.tool_calls.length; i++) { - const toolCall = response.tool_calls[i] - - const selectedTool = toolsInstance.find((tool) => tool.name === toolCall.name) - if (selectedTool) { - let parsedDocs - let parsedArtifacts - - const flowConfig = { - sessionId: options.sessionId, - chatId: options.chatId, - input: input, - state: options.agentflowRuntime?.state - } - - if (humanInput.type === 'reject') { - messages.pop() - toolsInstance = toolsInstance.filter((tool) => tool.name !== toolCall.name) - } - if (humanInput.type === 'proceed') { - try { - //@ts-ignore - let toolOutput = await selectedTool.call(toolCall.args, { signal: abortController?.signal }, undefined, flowConfig) - - // Extract source documents if present - if (typeof toolOutput === 'string' && toolOutput.includes(SOURCE_DOCUMENTS_PREFIX)) { - const [output, docs] = toolOutput.split(SOURCE_DOCUMENTS_PREFIX) - toolOutput = output - try { - parsedDocs = JSON.parse(docs) - sourceDocuments.push(parsedDocs) - } catch (e) { - console.error('Error parsing source documents from tool:', e) - } - } - - // Extract artifacts if present - if (typeof toolOutput === 'string' && toolOutput.includes(ARTIFACTS_PREFIX)) { - const [output, artifact] = toolOutput.split(ARTIFACTS_PREFIX) - toolOutput = output - try { - parsedArtifacts = JSON.parse(artifact) - artifacts.push(parsedArtifacts) - } catch (e) { - console.error('Error parsing artifacts from tool:', e) - } - } - - // Add tool message to conversation - messages.push({ - role: 'tool', - content: toolOutput, - tool_call_id: toolCall.id, - name: toolCall.name, - additional_kwargs: { - artifacts: parsedArtifacts, - sourceDocuments: parsedDocs - } - }) - - // Track used tools - usedTools.push({ - tool: toolCall.name, - toolInput: toolCall.args, 
- toolOutput - }) - } catch (e) { - console.error('Error invoking tool:', e) - usedTools.push({ - tool: selectedTool.name, - toolInput: toolCall.args, - toolOutput: '', - error: getErrorMessage(e) - }) - sseStreamer?.streamUsedToolsEvent(chatId, flatten(usedTools)) - throw new Error(getErrorMessage(e)) - } - } - } - } - - // Return direct tool output if there's exactly one tool with returnDirect - if (response.tool_calls.length === 1) { - const selectedTool = toolsInstance.find((tool) => tool.name === response.tool_calls?.[0]?.name) - if (selectedTool && selectedTool.returnDirect) { - const lastToolOutput = usedTools[0]?.toolOutput || '' - const lastToolOutputString = typeof lastToolOutput === 'string' ? lastToolOutput : JSON.stringify(lastToolOutput, null, 2) - - if (sseStreamer) { - sseStreamer.streamTokenEvent(chatId, lastToolOutputString) - } - - return { - response: new AIMessageChunk(lastToolOutputString), - usedTools, - sourceDocuments, - artifacts, - totalTokens - } - } - } - - // Get LLM response after tool calls - let newResponse: AIMessageChunk - - if (llmNodeInstance && toolsInstance.length > 0) { - if (llmNodeInstance.bindTools === undefined) { - throw new Error(`Agent needs to have a function calling capable models.`) - } - - // @ts-ignore - llmNodeInstance = llmNodeInstance.bindTools(toolsInstance) - } - - if (isStreamable) { - newResponse = await this.handleStreamingResponse(sseStreamer, llmNodeInstance, messages, chatId, abortController) + messages, + chatId, + abortController, + isStructuredOutput + ) } else { newResponse = await llmNodeInstance.invoke(messages, { signal: abortController?.signal }) // Stream non-streaming response if this is the last node - if (isLastNode && sseStreamer) { + if (isLastNode && sseStreamer && !isStructuredOutput) { let responseContent = JSON.stringify(newResponse, null, 2) if (typeof newResponse.content === 'string') { responseContent = newResponse.content @@ -1761,7 +2139,8 @@ class Agent_Agentflow implements INode 
{ llmNodeInstance, isStreamable, isLastNode, - iterationContext + iterationContext, + isStructuredOutput }) // Merge results from recursive tool calls @@ -1775,6 +2154,553 @@ class Agent_Agentflow implements INode { return { response: newResponse, usedTools, sourceDocuments, artifacts, totalTokens, isWaitingForHumanInput } } + + /** + * Handles tool calls and their responses, with support for recursive tool calling + */ + private async handleResumedToolCalls({ + humanInput, + humanInputAction, + messages, + toolsInstance, + sseStreamer, + chatId, + input, + options, + abortController, + llmWithoutToolsBind, + isStreamable, + isLastNode, + iterationContext, + isStructuredOutput = false + }: { + humanInput: IHumanInput + humanInputAction: Record | undefined + messages: BaseMessageLike[] + toolsInstance: Tool[] + sseStreamer: IServerSideEventStreamer | undefined + chatId: string + input: string | Record + options: ICommonObject + abortController: AbortController + llmWithoutToolsBind: BaseChatModel + isStreamable: boolean + isLastNode: boolean + iterationContext: ICommonObject + isStructuredOutput?: boolean + }): Promise<{ + response: AIMessageChunk + usedTools: IUsedTool[] + sourceDocuments: Array + artifacts: any[] + totalTokens: number + isWaitingForHumanInput?: boolean + }> { + let llmNodeInstance = llmWithoutToolsBind + const usedTools: IUsedTool[] = [] + let sourceDocuments: Array = [] + let artifacts: any[] = [] + let isWaitingForHumanInput: boolean | undefined + + const lastCheckpointMessages = humanInputAction?.data?.input?.messages ?? 
[] + if (!lastCheckpointMessages.length) { + return { response: new AIMessageChunk(''), usedTools: [], sourceDocuments: [], artifacts: [], totalTokens: 0 } + } + + // Use the last message as the response + const response = lastCheckpointMessages[lastCheckpointMessages.length - 1] as AIMessageChunk + + // Replace messages array + messages.length = 0 + messages.push(...lastCheckpointMessages.slice(0, lastCheckpointMessages.length - 1)) + + // Track total tokens used throughout this process + let totalTokens = response.usage_metadata?.total_tokens || 0 + + if (!response.tool_calls || response.tool_calls.length === 0) { + return { response, usedTools: [], sourceDocuments: [], artifacts: [], totalTokens } + } + + // Stream tool calls if available + if (sseStreamer) { + const formattedToolCalls = response.tool_calls.map((toolCall: any) => ({ + tool: toolCall.name || 'tool', + toolInput: toolCall.args, + toolOutput: '' + })) + sseStreamer.streamCalledToolsEvent(chatId, flatten(formattedToolCalls)) + } + + // Remove tool calls with no id + const toBeRemovedToolCalls = [] + for (let i = 0; i < response.tool_calls.length; i++) { + const toolCall = response.tool_calls[i] + if (!toolCall.id) { + toBeRemovedToolCalls.push(toolCall) + usedTools.push({ + tool: toolCall.name || 'tool', + toolInput: toolCall.args, + toolOutput: response.content + }) + } + } + + for (const toolCall of toBeRemovedToolCalls) { + response.tool_calls.splice(response.tool_calls.indexOf(toolCall), 1) + } + + // Add LLM response with tool calls to messages + messages.push({ + id: response.id, + role: 'assistant', + content: response.content, + tool_calls: response.tool_calls, + usage_metadata: response.usage_metadata + }) + + // Process each tool call + for (let i = 0; i < response.tool_calls.length; i++) { + const toolCall = response.tool_calls[i] + + const selectedTool = toolsInstance.find((tool) => tool.name === toolCall.name) + if (selectedTool) { + let parsedDocs + let parsedArtifacts + + const 
flowConfig = { + chatflowId: options.chatflowid, + sessionId: options.sessionId, + chatId: options.chatId, + input: input, + state: options.agentflowRuntime?.state + } + + if (humanInput.type === 'reject') { + messages.pop() + const toBeRemovedTool = toolsInstance.find((tool) => tool.name === toolCall.name) + if (toBeRemovedTool) { + toolsInstance = toolsInstance.filter((tool) => tool.name !== toolCall.name) + // Remove other tools with the same agentSelectedTool such as MCP tools + toolsInstance = toolsInstance.filter( + (tool) => (tool as any).agentSelectedTool !== (toBeRemovedTool as any).agentSelectedTool + ) + } + } + if (humanInput.type === 'proceed') { + let toolIds: ICommonObject | undefined + if (options.analyticHandlers) { + toolIds = await options.analyticHandlers.onToolStart(toolCall.name, toolCall.args, options.parentTraceIds) + } + + try { + //@ts-ignore + let toolOutput = await selectedTool.call(toolCall.args, { signal: abortController?.signal }, undefined, flowConfig) + + if (options.analyticHandlers && toolIds) { + await options.analyticHandlers.onToolEnd(toolIds, toolOutput) + } + + // Extract source documents if present + if (typeof toolOutput === 'string' && toolOutput.includes(SOURCE_DOCUMENTS_PREFIX)) { + const [output, docs] = toolOutput.split(SOURCE_DOCUMENTS_PREFIX) + toolOutput = output + try { + parsedDocs = JSON.parse(docs) + sourceDocuments.push(parsedDocs) + } catch (e) { + console.error('Error parsing source documents from tool:', e) + } + } + + // Extract artifacts if present + if (typeof toolOutput === 'string' && toolOutput.includes(ARTIFACTS_PREFIX)) { + const [output, artifact] = toolOutput.split(ARTIFACTS_PREFIX) + toolOutput = output + try { + parsedArtifacts = JSON.parse(artifact) + artifacts.push(parsedArtifacts) + } catch (e) { + console.error('Error parsing artifacts from tool:', e) + } + } + + let toolInput + if (typeof toolOutput === 'string' && toolOutput.includes(TOOL_ARGS_PREFIX)) { + const [output, args] = 
toolOutput.split(TOOL_ARGS_PREFIX) + toolOutput = output + try { + toolInput = JSON.parse(args) + } catch (e) { + console.error('Error parsing tool input from tool:', e) + } + } + + // Add tool message to conversation + messages.push({ + role: 'tool', + content: toolOutput, + tool_call_id: toolCall.id, + name: toolCall.name, + additional_kwargs: { + artifacts: parsedArtifacts, + sourceDocuments: parsedDocs + } + }) + + // Track used tools + usedTools.push({ + tool: toolCall.name, + toolInput: toolInput ?? toolCall.args, + toolOutput + }) + } catch (e) { + if (options.analyticHandlers && toolIds) { + await options.analyticHandlers.onToolEnd(toolIds, e) + } + + console.error('Error invoking tool:', e) + const errMsg = getErrorMessage(e) + let toolInput = toolCall.args + if (typeof errMsg === 'string' && errMsg.includes(TOOL_ARGS_PREFIX)) { + const [_, args] = errMsg.split(TOOL_ARGS_PREFIX) + try { + toolInput = JSON.parse(args) + } catch (e) { + console.error('Error parsing tool input from tool:', e) + } + } + + usedTools.push({ + tool: selectedTool.name, + toolInput, + toolOutput: '', + error: getErrorMessage(e) + }) + sseStreamer?.streamUsedToolsEvent(chatId, flatten(usedTools)) + throw new Error(getErrorMessage(e)) + } + } + } + } + + // Return direct tool output if there's exactly one tool with returnDirect + if (response.tool_calls.length === 1) { + const selectedTool = toolsInstance.find((tool) => tool.name === response.tool_calls?.[0]?.name) + if (selectedTool && selectedTool.returnDirect) { + const lastToolOutput = usedTools[0]?.toolOutput || '' + const lastToolOutputString = typeof lastToolOutput === 'string' ? 
lastToolOutput : JSON.stringify(lastToolOutput, null, 2) + + if (sseStreamer && !isStructuredOutput) { + sseStreamer.streamTokenEvent(chatId, lastToolOutputString) + } + + return { + response: new AIMessageChunk(lastToolOutputString), + usedTools, + sourceDocuments, + artifacts, + totalTokens + } + } + } + + // Get LLM response after tool calls + let newResponse: AIMessageChunk + + if (llmNodeInstance && (llmNodeInstance as any).builtInTools && (llmNodeInstance as any).builtInTools.length > 0) { + toolsInstance.push(...(llmNodeInstance as any).builtInTools) + } + + if (llmNodeInstance && toolsInstance.length > 0) { + if (llmNodeInstance.bindTools === undefined) { + throw new Error(`Agent needs to have a function calling capable models.`) + } + + // @ts-ignore + llmNodeInstance = llmNodeInstance.bindTools(toolsInstance) + } + + if (isStreamable) { + newResponse = await this.handleStreamingResponse( + sseStreamer, + llmNodeInstance, + messages, + chatId, + abortController, + isStructuredOutput + ) + } else { + newResponse = await llmNodeInstance.invoke(messages, { signal: abortController?.signal }) + + // Stream non-streaming response if this is the last node + if (isLastNode && sseStreamer && !isStructuredOutput) { + let responseContent = JSON.stringify(newResponse, null, 2) + if (typeof newResponse.content === 'string') { + responseContent = newResponse.content + } + sseStreamer.streamTokenEvent(chatId, responseContent) + } + } + + // Add tokens from this response + if (newResponse.usage_metadata?.total_tokens) { + totalTokens += newResponse.usage_metadata.total_tokens + } + + // Check for recursive tool calls and handle them + if (newResponse.tool_calls && newResponse.tool_calls.length > 0) { + const { + response: recursiveResponse, + usedTools: recursiveUsedTools, + sourceDocuments: recursiveSourceDocuments, + artifacts: recursiveArtifacts, + totalTokens: recursiveTokens, + isWaitingForHumanInput: recursiveIsWaitingForHumanInput + } = await this.handleToolCalls({ 
+ response: newResponse, + messages, + toolsInstance, + sseStreamer, + chatId, + input, + options, + abortController, + llmNodeInstance, + isStreamable, + isLastNode, + iterationContext, + isStructuredOutput + }) + + // Merge results from recursive tool calls + newResponse = recursiveResponse + usedTools.push(...recursiveUsedTools) + sourceDocuments = [...sourceDocuments, ...recursiveSourceDocuments] + artifacts = [...artifacts, ...recursiveArtifacts] + totalTokens += recursiveTokens + isWaitingForHumanInput = recursiveIsWaitingForHumanInput + } + + return { response: newResponse, usedTools, sourceDocuments, artifacts, totalTokens, isWaitingForHumanInput } + } + + /** + * Extracts artifacts from response metadata (both annotations and built-in tools) + */ + private async extractArtifactsFromResponse( + responseMetadata: any, + modelNodeData: INodeData, + options: ICommonObject + ): Promise<{ artifacts: any[]; fileAnnotations: any[] }> { + const artifacts: any[] = [] + const fileAnnotations: any[] = [] + + if (!responseMetadata?.output || !Array.isArray(responseMetadata.output)) { + return { artifacts, fileAnnotations } + } + + for (const outputItem of responseMetadata.output) { + // Handle container file citations from annotations + if (outputItem.type === 'message' && outputItem.content && Array.isArray(outputItem.content)) { + for (const contentItem of outputItem.content) { + if (contentItem.annotations && Array.isArray(contentItem.annotations)) { + for (const annotation of contentItem.annotations) { + if (annotation.type === 'container_file_citation' && annotation.file_id && annotation.filename) { + try { + // Download and store the file content + const downloadResult = await this.downloadContainerFile( + annotation.container_id, + annotation.file_id, + annotation.filename, + modelNodeData, + options + ) + + if (downloadResult) { + const fileType = this.getArtifactTypeFromFilename(annotation.filename) + + if (fileType === 'png' || fileType === 'jpeg' || fileType 
=== 'jpg') { + const artifact = { + type: fileType, + data: downloadResult.filePath + } + + artifacts.push(artifact) + } else { + fileAnnotations.push({ + filePath: downloadResult.filePath, + fileName: annotation.filename + }) + } + } + } catch (error) { + console.error('Error processing annotation:', error) + } + } + } + } + } + } + + // Handle built-in tool artifacts (like image generation) + if (outputItem.type === 'image_generation_call' && outputItem.result) { + try { + const savedImageResult = await this.saveBase64Image(outputItem, options) + if (savedImageResult) { + // Replace the base64 result with the file path in the response metadata + outputItem.result = savedImageResult.filePath + + // Create artifact in the same format as other image artifacts + const fileType = this.getArtifactTypeFromFilename(savedImageResult.fileName) + artifacts.push({ + type: fileType, + data: savedImageResult.filePath + }) + } + } catch (error) { + console.error('Error processing image generation artifact:', error) + } + } + } + + return { artifacts, fileAnnotations } + } + + /** + * Downloads file content from container file citation + */ + private async downloadContainerFile( + containerId: string, + fileId: string, + filename: string, + modelNodeData: INodeData, + options: ICommonObject + ): Promise<{ filePath: string; totalSize: number } | null> { + try { + const credentialData = await getCredentialData(modelNodeData.credential ?? 
'', options) + const openAIApiKey = getCredentialParam('openAIApiKey', credentialData, modelNodeData) + + if (!openAIApiKey) { + console.warn('No OpenAI API key available for downloading container file') + return null + } + + // Download the file using OpenAI Container API + const response = await fetch(`https://api.openai.com/v1/containers/${containerId}/files/${fileId}/content`, { + method: 'GET', + headers: { + Accept: '*/*', + Authorization: `Bearer ${openAIApiKey}` + } + }) + + if (!response.ok) { + console.warn( + `Failed to download container file ${fileId} from container ${containerId}: ${response.status} ${response.statusText}` + ) + return null + } + + // Extract the binary data from the Response object + const data = await response.arrayBuffer() + const dataBuffer = Buffer.from(data) + const mimeType = this.getMimeTypeFromFilename(filename) + + // Store the file using the same storage utility as OpenAIAssistant + const { path, totalSize } = await addSingleFileToStorage( + mimeType, + dataBuffer, + filename, + options.orgId, + options.chatflowid, + options.chatId + ) + + return { filePath: path, totalSize } + } catch (error) { + console.error('Error downloading container file:', error) + return null + } + } + + /** + * Gets MIME type from filename extension + */ + private getMimeTypeFromFilename(filename: string): string { + const extension = filename.toLowerCase().split('.').pop() + const mimeTypes: { [key: string]: string } = { + png: 'image/png', + jpg: 'image/jpeg', + jpeg: 'image/jpeg', + gif: 'image/gif', + pdf: 'application/pdf', + txt: 'text/plain', + csv: 'text/csv', + json: 'application/json', + html: 'text/html', + xml: 'application/xml' + } + return mimeTypes[extension || ''] || 'application/octet-stream' + } + + /** + * Gets artifact type from filename extension for UI rendering + */ + private getArtifactTypeFromFilename(filename: string): string { + const extension = filename.toLowerCase().split('.').pop() + const artifactTypes: { [key: 
string]: string } = { + png: 'png', + jpg: 'jpeg', + jpeg: 'jpeg', + html: 'html', + htm: 'html', + md: 'markdown', + markdown: 'markdown', + json: 'json', + js: 'javascript', + javascript: 'javascript', + tex: 'latex', + latex: 'latex', + txt: 'text', + csv: 'text', + pdf: 'text' + } + return artifactTypes[extension || ''] || 'text' + } + + /** + * Processes sandbox links in the response text and converts them to file annotations + */ + private async processSandboxLinks(text: string, baseURL: string, chatflowId: string, chatId: string): Promise { + let processedResponse = text + + // Regex to match sandbox links: [text](sandbox:/path/to/file) + const sandboxLinkRegex = /\[([^\]]+)\]\(sandbox:\/([^)]+)\)/g + const matches = Array.from(text.matchAll(sandboxLinkRegex)) + + for (const match of matches) { + const fullMatch = match[0] + const linkText = match[1] + const filePath = match[2] + + try { + // Extract filename from the file path + const fileName = filePath.split('/').pop() || filePath + + // Replace sandbox link with proper download URL + const downloadUrl = `${baseURL}/api/v1/get-upload-file?chatflowId=${chatflowId}&chatId=${chatId}&fileName=${fileName}&download=true` + const newLink = `[${linkText}](${downloadUrl})` + + processedResponse = processedResponse.replace(fullMatch, newLink) + } catch (error) { + console.error('Error processing sandbox link:', error) + // If there's an error, remove the sandbox link as fallback + processedResponse = processedResponse.replace(fullMatch, linkText) + } + } + + return processedResponse + } } module.exports = { nodeClass: Agent_Agentflow } diff --git a/packages/components/nodes/agentflow/Condition/Condition.ts b/packages/components/nodes/agentflow/Condition/Condition.ts index af2fa0411..7ae1be062 100644 --- a/packages/components/nodes/agentflow/Condition/Condition.ts +++ b/packages/components/nodes/agentflow/Condition/Condition.ts @@ -1,4 +1,5 @@ import { CommonType, ICommonObject, ICondition, INode, INodeData, 
INodeOutputsValue, INodeParams } from '../../../src/Interface' +import removeMarkdown from 'remove-markdown' class Condition_Agentflow implements INode { label: string @@ -300,8 +301,8 @@ class Condition_Agentflow implements INode { value2 = parseFloat(_value2 as string) || 0 break default: // string - value1 = _value1 as string - value2 = _value2 as string + value1 = removeMarkdown((_value1 as string) || '') + value2 = removeMarkdown((_value2 as string) || '') } const compareOperationResult = compareOperationFunctions[operation](value1, value2) @@ -316,7 +317,7 @@ class Condition_Agentflow implements INode { } } - // If no condition is fullfilled, add isFulfilled to the ELSE condition + // If no condition is fulfilled, add isFulfilled to the ELSE condition const dummyElseConditionData = { type: 'string', value1: '', diff --git a/packages/components/nodes/agentflow/ConditionAgent/ConditionAgent.ts b/packages/components/nodes/agentflow/ConditionAgent/ConditionAgent.ts index 6ec809f96..b23dd198f 100644 --- a/packages/components/nodes/agentflow/ConditionAgent/ConditionAgent.ts +++ b/packages/components/nodes/agentflow/ConditionAgent/ConditionAgent.ts @@ -27,7 +27,7 @@ class ConditionAgent_Agentflow implements INode { constructor() { this.label = 'Condition Agent' this.name = 'conditionAgentAgentflow' - this.version = 1.0 + this.version = 1.1 this.type = 'ConditionAgent' this.category = 'Agent Flows' this.description = `Utilize an agent to split flows based on dynamic conditions` @@ -80,6 +80,26 @@ class ConditionAgent_Agentflow implements INode { scenario: '' } ] + }, + { + label: 'Override System Prompt', + name: 'conditionAgentOverrideSystemPrompt', + type: 'boolean', + description: 'Override initial system prompt for Condition Agent', + optional: true + }, + { + label: 'Node System Prompt', + name: 'conditionAgentSystemPrompt', + type: 'string', + rows: 4, + optional: true, + acceptVariable: true, + default: CONDITION_AGENT_SYSTEM_PROMPT, + description: 'Expert use 
only. Modifying this can significantly alter agent behavior. Leave default if unsure', + show: { + conditionAgentOverrideSystemPrompt: true + } } /*{ label: 'Enable Memory', @@ -242,6 +262,12 @@ class ConditionAgent_Agentflow implements INode { const conditionAgentInput = nodeData.inputs?.conditionAgentInput as string let input = conditionAgentInput || question const conditionAgentInstructions = nodeData.inputs?.conditionAgentInstructions as string + const conditionAgentSystemPrompt = nodeData.inputs?.conditionAgentSystemPrompt as string + const conditionAgentOverrideSystemPrompt = nodeData.inputs?.conditionAgentOverrideSystemPrompt as boolean + let systemPrompt = CONDITION_AGENT_SYSTEM_PROMPT + if (conditionAgentSystemPrompt && conditionAgentOverrideSystemPrompt) { + systemPrompt = conditionAgentSystemPrompt + } // Extract memory and configuration options const enableMemory = nodeData.inputs?.conditionAgentEnableMemory as boolean @@ -277,31 +303,15 @@ class ConditionAgent_Agentflow implements INode { const messages: BaseMessageLike[] = [ { role: 'system', - content: CONDITION_AGENT_SYSTEM_PROMPT + content: systemPrompt }, { role: 'user', - content: `{"input": "Hello", "scenarios": ["user is asking about AI", "default"], "instruction": "Your task is to check and see if user is asking topic about AI"}` + content: `{"input": "Hello", "scenarios": ["user is asking about AI", "user is not asking about AI"], "instruction": "Your task is to check if the user is asking about AI."}` }, { role: 'assistant', - content: `\`\`\`json\n{"output": "default"}\n\`\`\`` - }, - { - role: 'user', - content: `{"input": "What is AIGC?", "scenarios": ["user is asking about AI", "default"], "instruction": "Your task is to check and see if user is asking topic about AI"}` - }, - { - role: 'assistant', - content: `\`\`\`json\n{"output": "user is asking about AI"}\n\`\`\`` - }, - { - role: 'user', - content: `{"input": "Can you explain deep learning?", "scenarios": ["user is interested in AI 
topics", "default"], "instruction": "Determine if the user is interested in learning about AI"}` - }, - { - role: 'assistant', - content: `\`\`\`json\n{"output": "user is interested in AI topics"}\n\`\`\`` + content: `\`\`\`json\n{"output": "user is not asking about AI"}\n\`\`\`` } ] // Use to store messages with image file references as we do not want to store the base64 data into database @@ -374,15 +384,19 @@ class ConditionAgent_Agentflow implements INode { ) } - let calledOutputName = 'default' + let calledOutputName: string try { const parsedResponse = this.parseJsonMarkdown(response.content as string) - if (!parsedResponse.output) { - throw new Error('Missing "output" key in response') + if (!parsedResponse.output || typeof parsedResponse.output !== 'string') { + throw new Error('LLM response is missing the "output" key or it is not a string.') } calledOutputName = parsedResponse.output } catch (error) { - console.warn(`Failed to parse LLM response: ${error}. Using default output.`) + throw new Error( + `Failed to parse a valid scenario from the LLM's response. Please check if the model is capable of following JSON output instructions. 
Raw LLM Response: "${ + response.content as string + }"` + ) } // Clean up empty inputs diff --git a/packages/components/nodes/agentflow/CustomFunction/CustomFunction.ts b/packages/components/nodes/agentflow/CustomFunction/CustomFunction.ts index 6922c651b..e768c7809 100644 --- a/packages/components/nodes/agentflow/CustomFunction/CustomFunction.ts +++ b/packages/components/nodes/agentflow/CustomFunction/CustomFunction.ts @@ -8,8 +8,7 @@ import { INodeParams, IServerSideEventStreamer } from '../../../src/Interface' -import { availableDependencies, defaultAllowBuiltInDep, getVars, prepareSandboxVars } from '../../../src/utils' -import { NodeVM } from '@flowiseai/nodevm' +import { getVars, executeJavaScriptCode, createCodeExecutionSandbox, processTemplateVariables } from '../../../src/utils' import { updateFlowState } from '../utils' interface ICustomFunctionInputVariables { @@ -19,9 +18,9 @@ interface ICustomFunctionInputVariables { const exampleFunc = `/* * You can use any libraries imported in Flowise -* You can use properties specified in Input Schema as variables. Ex: Property = userid, Variable = $userid +* You can use properties specified in Input Variables with the prefix $. For example: $foo * You can get default flow config: $flow.sessionId, $flow.chatId, $flow.chatflowId, $flow.input, $flow.state -* You can get custom variables: $vars. +* You can get global variables: $vars. 
* Must return a string value at the end of function */ @@ -146,77 +145,51 @@ class CustomFunction_Agentflow implements INode { const appDataSource = options.appDataSource as DataSource const databaseEntities = options.databaseEntities as IDatabaseEntity - // Update flow state if needed - let newState = { ...state } - if (_customFunctionUpdateState && Array.isArray(_customFunctionUpdateState) && _customFunctionUpdateState.length > 0) { - newState = updateFlowState(state, _customFunctionUpdateState) - } - - const variables = await getVars(appDataSource, databaseEntities, nodeData) + const variables = await getVars(appDataSource, databaseEntities, nodeData, options) const flow = { chatflowId: options.chatflowid, sessionId: options.sessionId, chatId: options.chatId, - input + input, + state } - let sandbox: any = { - $input: input, - util: undefined, - Symbol: undefined, - child_process: undefined, - fs: undefined, - process: undefined - } - sandbox['$vars'] = prepareSandboxVars(variables) - sandbox['$flow'] = flow - + // Create additional sandbox variables for custom function inputs + const additionalSandbox: ICommonObject = {} for (const item of functionInputVariables) { const variableName = item.variableName const variableValue = item.variableValue - sandbox[`$${variableName}`] = variableValue + additionalSandbox[`$${variableName}`] = variableValue } - const builtinDeps = process.env.TOOL_FUNCTION_BUILTIN_DEP - ? defaultAllowBuiltInDep.concat(process.env.TOOL_FUNCTION_BUILTIN_DEP.split(',')) - : defaultAllowBuiltInDep - const externalDeps = process.env.TOOL_FUNCTION_EXTERNAL_DEP ? 
process.env.TOOL_FUNCTION_EXTERNAL_DEP.split(',') : [] - const deps = availableDependencies.concat(externalDeps) + const sandbox = createCodeExecutionSandbox(input, variables, flow, additionalSandbox) - const nodeVMOptions = { - console: 'inherit', - sandbox, - require: { - external: { modules: deps }, - builtin: builtinDeps - }, - eval: false, - wasm: false, - timeout: 10000 - } as any + // Setup streaming function if needed + const streamOutput = isStreamable + ? (output: string) => { + const sseStreamer: IServerSideEventStreamer = options.sseStreamer + sseStreamer.streamTokenEvent(chatId, output) + } + : undefined - const vm = new NodeVM(nodeVMOptions) try { - const response = await vm.run(`module.exports = async function() {${javascriptFunction}}()`, __dirname) + const response = await executeJavaScriptCode(javascriptFunction, sandbox, { + libraries: ['axios'], + streamOutput + }) let finalOutput = response if (typeof response === 'object') { finalOutput = JSON.stringify(response, null, 2) } - if (isStreamable) { - const sseStreamer: IServerSideEventStreamer = options.sseStreamer - sseStreamer.streamTokenEvent(chatId, finalOutput) + // Update flow state if needed + let newState = { ...state } + if (_customFunctionUpdateState && Array.isArray(_customFunctionUpdateState) && _customFunctionUpdateState.length > 0) { + newState = updateFlowState(state, _customFunctionUpdateState) } - // Process template variables in state - if (newState && Object.keys(newState).length > 0) { - for (const key in newState) { - if (newState[key].toString().includes('{{ output }}')) { - newState[key] = finalOutput - } - } - } + newState = processTemplateVariables(newState, finalOutput) const returnOutput = { id: nodeData.id, diff --git a/packages/components/nodes/agentflow/ExecuteFlow/ExecuteFlow.ts b/packages/components/nodes/agentflow/ExecuteFlow/ExecuteFlow.ts index 26e5df7b6..e2f0765ad 100644 --- a/packages/components/nodes/agentflow/ExecuteFlow/ExecuteFlow.ts +++ 
b/packages/components/nodes/agentflow/ExecuteFlow/ExecuteFlow.ts @@ -8,7 +8,7 @@ import { IServerSideEventStreamer } from '../../../src/Interface' import axios, { AxiosRequestConfig } from 'axios' -import { getCredentialData, getCredentialParam } from '../../../src/utils' +import { getCredentialData, getCredentialParam, processTemplateVariables, parseJsonBody } from '../../../src/utils' import { DataSource } from 'typeorm' import { BaseMessageLike } from '@langchain/core/messages' import { updateFlowState } from '../utils' @@ -30,7 +30,7 @@ class ExecuteFlow_Agentflow implements INode { constructor() { this.label = 'Execute Flow' this.name = 'executeFlowAgentflow' - this.version = 1.0 + this.version = 1.1 this.type = 'ExecuteFlow' this.category = 'Agent Flows' this.description = 'Execute another flow' @@ -62,7 +62,8 @@ class ExecuteFlow_Agentflow implements INode { name: 'executeFlowOverrideConfig', description: 'Override the config passed to the flow', type: 'json', - optional: true + optional: true, + acceptVariable: true }, { label: 'Base URL', @@ -127,7 +128,8 @@ class ExecuteFlow_Agentflow implements INode { return returnData } - const chatflows = await appDataSource.getRepository(databaseEntities['ChatFlow']).find() + const searchOptions = options.searchOptions || {} + const chatflows = await appDataSource.getRepository(databaseEntities['ChatFlow']).findBy(searchOptions) for (let i = 0; i < chatflows.length; i += 1) { let cfType = 'Chatflow' @@ -161,12 +163,15 @@ class ExecuteFlow_Agentflow implements INode { const flowInput = nodeData.inputs?.executeFlowInput as string const returnResponseAs = nodeData.inputs?.executeFlowReturnResponseAs as string const _executeFlowUpdateState = nodeData.inputs?.executeFlowUpdateState - const overrideConfig = - typeof nodeData.inputs?.executeFlowOverrideConfig === 'string' && - nodeData.inputs.executeFlowOverrideConfig.startsWith('{') && - nodeData.inputs.executeFlowOverrideConfig.endsWith('}') - ? 
JSON.parse(nodeData.inputs.executeFlowOverrideConfig) - : nodeData.inputs?.executeFlowOverrideConfig + + let overrideConfig = nodeData.inputs?.executeFlowOverrideConfig + if (typeof overrideConfig === 'string' && overrideConfig.startsWith('{') && overrideConfig.endsWith('}')) { + try { + overrideConfig = parseJsonBody(overrideConfig) + } catch (parseError) { + throw new Error(`Invalid JSON in executeFlowOverrideConfig: ${parseError.message}`) + } + } const state = options.agentflowRuntime?.state as ICommonObject const runtimeChatHistory = (options.agentflowRuntime?.chatHistory as BaseMessageLike[]) ?? [] @@ -180,7 +185,8 @@ class ExecuteFlow_Agentflow implements INode { if (selectedFlowId === options.chatflowid) throw new Error('Cannot call the same agentflow!') let headers: Record = { - 'Content-Type': 'application/json' + 'Content-Type': 'application/json', + 'flowise-tool': 'true' } if (chatflowApiKey) headers = { ...headers, Authorization: `Bearer ${chatflowApiKey}` } @@ -214,13 +220,7 @@ class ExecuteFlow_Agentflow implements INode { } // Process template variables in state - if (newState && Object.keys(newState).length > 0) { - for (const key in newState) { - if (newState[key].toString().includes('{{ output }}')) { - newState[key] = resultText - } - } - } + newState = processTemplateVariables(newState, resultText) // Only add to runtime chat history if this is the first node const inputMessages = [] diff --git a/packages/components/nodes/agentflow/HTTP/HTTP.ts b/packages/components/nodes/agentflow/HTTP/HTTP.ts index 752d6dd0b..697fb0e31 100644 --- a/packages/components/nodes/agentflow/HTTP/HTTP.ts +++ b/packages/components/nodes/agentflow/HTTP/HTTP.ts @@ -1,8 +1,9 @@ import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface' -import axios, { AxiosRequestConfig, Method, ResponseType } from 'axios' +import { AxiosRequestConfig, Method, ResponseType } from 'axios' import FormData from 'form-data' import * as querystring from 
'querystring' -import { getCredentialData, getCredentialParam } from '../../../src/utils' +import { getCredentialData, getCredentialParam, parseJsonBody } from '../../../src/utils' +import { secureAxiosRequest } from '../../../src/httpSecurity' class HTTP_Agentflow implements INode { label: string @@ -21,7 +22,7 @@ class HTTP_Agentflow implements INode { constructor() { this.label = 'HTTP' this.name = 'httpAgentflow' - this.version = 1.0 + this.version = 1.1 this.type = 'HTTP' this.category = 'Agent Flows' this.description = 'Send a HTTP request' @@ -66,12 +67,14 @@ class HTTP_Agentflow implements INode { { label: 'URL', name: 'url', - type: 'string' + type: 'string', + acceptVariable: true }, { label: 'Headers', name: 'headers', type: 'array', + acceptVariable: true, array: [ { label: 'Key', @@ -83,7 +86,8 @@ class HTTP_Agentflow implements INode { label: 'Value', name: 'value', type: 'string', - default: '' + default: '', + acceptVariable: true } ], optional: true @@ -92,6 +96,7 @@ class HTTP_Agentflow implements INode { label: 'Query Params', name: 'queryParams', type: 'array', + acceptVariable: true, array: [ { label: 'Key', @@ -103,7 +108,8 @@ class HTTP_Agentflow implements INode { label: 'Value', name: 'value', type: 'string', - default: '' + default: '', + acceptVariable: true } ], optional: true @@ -147,6 +153,7 @@ class HTTP_Agentflow implements INode { label: 'Body', name: 'body', type: 'array', + acceptVariable: true, show: { bodyType: ['xWwwFormUrlencoded', 'formData'] }, @@ -161,7 +168,8 @@ class HTTP_Agentflow implements INode { label: 'Value', name: 'value', type: 'string', - default: '' + default: '', + acceptVariable: true } ], optional: true @@ -220,14 +228,14 @@ class HTTP_Agentflow implements INode { // Add credentials if provided const credentialData = await getCredentialData(nodeData.credential ?? 
'', options) if (credentialData && Object.keys(credentialData).length !== 0) { - const basicAuthUsername = getCredentialParam('username', credentialData, nodeData) - const basicAuthPassword = getCredentialParam('password', credentialData, nodeData) + const basicAuthUsername = getCredentialParam('basicAuthUsername', credentialData, nodeData) + const basicAuthPassword = getCredentialParam('basicAuthPassword', credentialData, nodeData) const bearerToken = getCredentialParam('token', credentialData, nodeData) const apiKeyName = getCredentialParam('key', credentialData, nodeData) const apiKeyValue = getCredentialParam('value', credentialData, nodeData) // Determine which type of auth to use based on available credentials - if (basicAuthUsername && basicAuthPassword) { + if (basicAuthUsername || basicAuthPassword) { // Basic Auth const auth = Buffer.from(`${basicAuthUsername}:${basicAuthPassword}`).toString('base64') requestHeaders['Authorization'] = `Basic ${auth}` @@ -266,10 +274,11 @@ class HTTP_Agentflow implements INode { // Handle request body based on body type if (method !== 'GET' && body) { switch (bodyType) { - case 'json': - requestConfig.data = typeof body === 'string' ? JSON.parse(body) : body + case 'json': { + requestConfig.data = typeof body === 'string' ? parseJsonBody(body) : body requestHeaders['Content-Type'] = 'application/json' break + } case 'raw': requestConfig.data = body break @@ -284,14 +293,14 @@ class HTTP_Agentflow implements INode { break } case 'xWwwFormUrlencoded': - requestConfig.data = querystring.stringify(typeof body === 'string' ? JSON.parse(body) : body) + requestConfig.data = querystring.stringify(typeof body === 'string' ? 
parseJsonBody(body) : body) requestHeaders['Content-Type'] = 'application/x-www-form-urlencoded' break } } - // Make the HTTP request - const response = await axios(requestConfig) + // Make the secure HTTP request that validates all URLs in redirect chains + const response = await secureAxiosRequest(requestConfig) // Process response based on response type let responseData @@ -330,6 +339,9 @@ class HTTP_Agentflow implements INode { } catch (error) { console.error('HTTP Request Error:', error) + const errorMessage = + error.response?.data?.message || error.response?.data?.error || error.message || 'An error occurred during the HTTP request' + // Format error response const errorResponse: any = { id: nodeData.id, @@ -347,7 +359,7 @@ class HTTP_Agentflow implements INode { }, error: { name: error.name || 'Error', - message: error.message || 'An error occurred during the HTTP request' + message: errorMessage }, state } @@ -360,7 +372,7 @@ class HTTP_Agentflow implements INode { errorResponse.error.headers = error.response.headers } - throw new Error(error) + throw new Error(errorMessage) } } } diff --git a/packages/components/nodes/agentflow/HumanInput/HumanInput.ts b/packages/components/nodes/agentflow/HumanInput/HumanInput.ts index 6fa388e26..b4811d1c8 100644 --- a/packages/components/nodes/agentflow/HumanInput/HumanInput.ts +++ b/packages/components/nodes/agentflow/HumanInput/HumanInput.ts @@ -208,7 +208,7 @@ class HumanInput_Agentflow implements INode { humanInputDescription = (nodeData.inputs?.humanInputDescription as string) || 'Do you want to proceed?' const messages = [...pastChatHistory, ...runtimeChatHistory] // Find the last message in the messages array - const lastMessage = (messages[messages.length - 1] as any).content || '' + const lastMessage = messages.length > 0 ? 
(messages[messages.length - 1] as any).content || '' : '' humanInputDescription = `${lastMessage}\n\n${humanInputDescription}` if (isStreamable) { const sseStreamer: IServerSideEventStreamer = options.sseStreamer as IServerSideEventStreamer @@ -241,8 +241,11 @@ class HumanInput_Agentflow implements INode { if (isStreamable) { const sseStreamer: IServerSideEventStreamer = options.sseStreamer as IServerSideEventStreamer for await (const chunk of await llmNodeInstance.stream(messages)) { - sseStreamer.streamTokenEvent(chatId, chunk.content.toString()) - response = response.concat(chunk) + const content = typeof chunk === 'string' ? chunk : chunk.content.toString() + sseStreamer.streamTokenEvent(chatId, content) + + const messageChunk = typeof chunk === 'string' ? new AIMessageChunk(chunk) : chunk + response = response.concat(messageChunk) } humanInputDescription = response.content as string } else { diff --git a/packages/components/nodes/agentflow/Iteration/Iteration.ts b/packages/components/nodes/agentflow/Iteration/Iteration.ts index 048035fb2..145602b93 100644 --- a/packages/components/nodes/agentflow/Iteration/Iteration.ts +++ b/packages/components/nodes/agentflow/Iteration/Iteration.ts @@ -1,4 +1,5 @@ import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface' +import { parseJsonBody } from '../../../src/utils' class Iteration_Agentflow implements INode { label: string @@ -39,12 +40,17 @@ class Iteration_Agentflow implements INode { const iterationInput = nodeData.inputs?.iterationInput // Helper function to clean JSON strings with redundant backslashes - const cleanJsonString = (str: string): string => { - return str.replace(/\\(["'[\]{}])/g, '$1') + const safeParseJson = (str: string): string => { + try { + return parseJsonBody(str) + } catch { + // Try parsing after cleaning + return parseJsonBody(str.replace(/\\(["'[\]{}])/g, '$1')) + } } const iterationInputArray = - typeof iterationInput === 'string' && iterationInput !== '' ? 
JSON.parse(cleanJsonString(iterationInput)) : iterationInput + typeof iterationInput === 'string' && iterationInput !== '' ? safeParseJson(iterationInput) : iterationInput if (!iterationInputArray || !Array.isArray(iterationInputArray)) { throw new Error('Invalid input array') diff --git a/packages/components/nodes/agentflow/LLM/LLM.ts b/packages/components/nodes/agentflow/LLM/LLM.ts index 18f8d187d..a5bf4deb7 100644 --- a/packages/components/nodes/agentflow/LLM/LLM.ts +++ b/packages/components/nodes/agentflow/LLM/LLM.ts @@ -1,10 +1,9 @@ import { BaseChatModel } from '@langchain/core/language_models/chat_models' -import { ICommonObject, INode, INodeData, INodeOptionsValue, INodeParams, IServerSideEventStreamer } from '../../../src/Interface' +import { ICommonObject, IMessage, INode, INodeData, INodeOptionsValue, INodeParams, IServerSideEventStreamer } from '../../../src/Interface' import { AIMessageChunk, BaseMessageLike, MessageContentText } from '@langchain/core/messages' import { DEFAULT_SUMMARIZER_TEMPLATE } from '../prompt' -import { z } from 'zod' import { AnalyticHandler } from '../../../src/handler' -import { ILLMMessage, IStructuredOutput } from '../Interface.Agentflow' +import { ILLMMessage } from '../Interface.Agentflow' import { getPastChatHistoryImageMessages, getUniqueImageMessages, @@ -12,7 +11,8 @@ import { replaceBase64ImagesWithFileReferences, updateFlowState } from '../utils' -import { get } from 'lodash' +import { processTemplateVariables, configureStructuredOutput } from '../../../src/utils' +import { flatten } from 'lodash' class LLM_Agentflow implements INode { label: string @@ -262,6 +262,7 @@ class LLM_Agentflow implements INode { }`, description: 'JSON schema for the structured output', optional: true, + hideCodeExecute: true, show: { 'llmStructuredOutput[$index].type': 'jsonArray' } @@ -358,6 +359,7 @@ class LLM_Agentflow implements INode { const state = options.agentflowRuntime?.state as ICommonObject const pastChatHistory = 
(options.pastChatHistory as BaseMessageLike[]) ?? [] const runtimeChatHistory = (options.agentflowRuntime?.chatHistory as BaseMessageLike[]) ?? [] + const prependedChatHistory = options.prependedChatHistory as IMessage[] const chatId = options.chatId as string // Initialize the LLM model instance @@ -381,11 +383,27 @@ class LLM_Agentflow implements INode { // Use to keep track of past messages with image file references let pastImageMessagesWithFileRef: BaseMessageLike[] = [] + // Prepend history ONLY if it is the first node + if (prependedChatHistory.length > 0 && !runtimeChatHistory.length) { + for (const msg of prependedChatHistory) { + const role: string = msg.role === 'apiMessage' ? 'assistant' : 'user' + const content: string = msg.content ?? '' + messages.push({ + role, + content + }) + } + } + for (const msg of llmMessages) { const role = msg.role const content = msg.content if (role && content) { - messages.push({ role, content }) + if (role === 'system') { + messages.unshift({ role, content }) + } else { + messages.push({ role, content }) + } } } @@ -410,7 +428,7 @@ class LLM_Agentflow implements INode { /* * If this is the first node: * - Add images to messages if exist - * - Add user message + * - Add user message if it does not exist in the llmMessages array */ if (options.uploads) { const imageContents = await getUniqueImageMessages(options, messages, modelConfig) @@ -421,7 +439,7 @@ class LLM_Agentflow implements INode { } } - if (input && typeof input === 'string') { + if (input && typeof input === 'string' && !llmMessages.some((msg) => msg.role === 'user')) { messages.push({ role: 'user', content: input @@ -433,7 +451,7 @@ class LLM_Agentflow implements INode { // Configure structured output if specified const isStructuredOutput = _llmStructuredOutput && Array.isArray(_llmStructuredOutput) && _llmStructuredOutput.length > 0 if (isStructuredOutput) { - llmNodeInstance = this.configureStructuredOutput(llmNodeInstance, _llmStructuredOutput) + 
llmNodeInstance = configureStructuredOutput(llmNodeInstance, _llmStructuredOutput) } // Initialize response and determine if streaming is possible @@ -460,11 +478,15 @@ class LLM_Agentflow implements INode { // Stream whole response back to UI if this is the last node if (isLastNode && options.sseStreamer) { const sseStreamer: IServerSideEventStreamer = options.sseStreamer as IServerSideEventStreamer - let responseContent = JSON.stringify(response, null, 2) - if (typeof response.content === 'string') { - responseContent = response.content + let finalResponse = '' + if (response.content && Array.isArray(response.content)) { + finalResponse = response.content.map((item: any) => item.text).join('\n') + } else if (response.content && typeof response.content === 'string') { + finalResponse = response.content + } else { + finalResponse = JSON.stringify(response, null, 2) } - sseStreamer.streamTokenEvent(chatId, responseContent) + sseStreamer.streamTokenEvent(chatId, finalResponse) } } @@ -486,8 +508,15 @@ class LLM_Agentflow implements INode { } // Prepare final response and output object - const finalResponse = (response.content as string) ?? 
JSON.stringify(response, null, 2) - const output = this.prepareOutputObject(response, finalResponse, startTime, endTime, timeDelta) + let finalResponse = '' + if (response.content && Array.isArray(response.content)) { + finalResponse = response.content.map((item: any) => item.text).join('\n') + } else if (response.content && typeof response.content === 'string') { + finalResponse = response.content + } else { + finalResponse = JSON.stringify(response, null, 2) + } + const output = this.prepareOutputObject(response, finalResponse, startTime, endTime, timeDelta, isStructuredOutput) // End analytics tracking if (analyticHandlers && llmIds) { @@ -500,36 +529,7 @@ class LLM_Agentflow implements INode { } // Process template variables in state - if (newState && Object.keys(newState).length > 0) { - for (const key in newState) { - const stateValue = newState[key].toString() - if (stateValue.includes('{{ output')) { - // Handle simple output replacement - if (stateValue === '{{ output }}') { - newState[key] = finalResponse - continue - } - - // Handle JSON path expressions like {{ output.item1 }} - // eslint-disable-next-line - const match = stateValue.match(/{{[\s]*output\.([\w\.]+)[\s]*}}/) - if (match) { - try { - // Parse the response if it's JSON - const jsonResponse = typeof finalResponse === 'string' ? JSON.parse(finalResponse) : finalResponse - // Get the value using lodash get - const path = match[1] - const value = get(jsonResponse, path) - newState[key] = value ?? 
stateValue // Fall back to original if path not found - } catch (e) { - // If JSON parsing fails, keep original template - console.warn(`Failed to parse JSON or find path in output: ${e}`) - newState[key] = stateValue - } - } - } - } - } + newState = processTemplateVariables(newState, finalResponse) // Replace the actual messages array with one that includes the file references for images instead of base64 data const messagesWithFileReferences = replaceBase64ImagesWithFileReferences( @@ -545,7 +545,19 @@ class LLM_Agentflow implements INode { inputMessages.push(...runtimeImageMessagesWithFileRef) } if (input && typeof input === 'string') { - inputMessages.push({ role: 'user', content: input }) + if (!enableMemory) { + if (!llmMessages.some((msg) => msg.role === 'user')) { + inputMessages.push({ role: 'user', content: input }) + } else { + llmMessages.map((msg) => { + if (msg.role === 'user') { + inputMessages.push({ role: 'user', content: msg.content }) + } + }) + } + } else { + inputMessages.push({ role: 'user', content: input }) + } } } @@ -742,59 +754,6 @@ class LLM_Agentflow implements INode { } } - /** - * Configures structured output for the LLM - */ - private configureStructuredOutput(llmNodeInstance: BaseChatModel, llmStructuredOutput: IStructuredOutput[]): BaseChatModel { - try { - const zodObj: ICommonObject = {} - for (const sch of llmStructuredOutput) { - if (sch.type === 'string') { - zodObj[sch.key] = z.string().describe(sch.description || '') - } else if (sch.type === 'stringArray') { - zodObj[sch.key] = z.array(z.string()).describe(sch.description || '') - } else if (sch.type === 'number') { - zodObj[sch.key] = z.number().describe(sch.description || '') - } else if (sch.type === 'boolean') { - zodObj[sch.key] = z.boolean().describe(sch.description || '') - } else if (sch.type === 'enum') { - const enumValues = sch.enumValues?.split(',').map((item: string) => item.trim()) || [] - zodObj[sch.key] = z - .enum(enumValues.length ? 
(enumValues as [string, ...string[]]) : ['default']) - .describe(sch.description || '') - } else if (sch.type === 'jsonArray') { - const jsonSchema = sch.jsonSchema - if (jsonSchema) { - try { - // Parse the JSON schema - const schemaObj = JSON.parse(jsonSchema) - - // Create a Zod schema from the JSON schema - const itemSchema = this.createZodSchemaFromJSON(schemaObj) - - // Create an array schema of the item schema - zodObj[sch.key] = z.array(itemSchema).describe(sch.description || '') - } catch (err) { - console.error(`Error parsing JSON schema for ${sch.key}:`, err) - // Fallback to generic array of records - zodObj[sch.key] = z.array(z.record(z.any())).describe(sch.description || '') - } - } else { - // If no schema provided, use generic array of records - zodObj[sch.key] = z.array(z.record(z.any())).describe(sch.description || '') - } - } - } - const structuredOutput = z.object(zodObj) - - // @ts-ignore - return llmNodeInstance.withStructuredOutput(structuredOutput) - } catch (exception) { - console.error(exception) - return llmNodeInstance - } - } - /** * Handles streaming response from the LLM */ @@ -811,16 +770,20 @@ class LLM_Agentflow implements INode { for await (const chunk of await llmNodeInstance.stream(messages, { signal: abortController?.signal })) { if (sseStreamer) { let content = '' - if (Array.isArray(chunk.content) && chunk.content.length > 0) { + + if (typeof chunk === 'string') { + content = chunk + } else if (Array.isArray(chunk.content) && chunk.content.length > 0) { const contents = chunk.content as MessageContentText[] content = contents.map((item) => item.text).join('') - } else { + } else if (chunk.content) { content = chunk.content.toString() } sseStreamer.streamTokenEvent(chatId, content) } - response = response.concat(chunk) + const messageChunk = typeof chunk === 'string' ? 
new AIMessageChunk(chunk) : chunk + response = response.concat(messageChunk) } } catch (error) { console.error('Error during streaming:', error) @@ -841,7 +804,8 @@ class LLM_Agentflow implements INode { finalResponse: string, startTime: number, endTime: number, - timeDelta: number + timeDelta: number, + isStructuredOutput: boolean ): any { const output: any = { content: finalResponse, @@ -860,6 +824,15 @@ class LLM_Agentflow implements INode { output.usageMetadata = response.usage_metadata } + if (isStructuredOutput && typeof response === 'object') { + const structuredOutput = response as Record + for (const key in structuredOutput) { + if (structuredOutput[key] !== undefined && structuredOutput[key] !== null) { + output[key] = structuredOutput[key] + } + } + } + return output } @@ -870,7 +843,12 @@ class LLM_Agentflow implements INode { const sseStreamer: IServerSideEventStreamer = options.sseStreamer as IServerSideEventStreamer if (response.tool_calls) { - sseStreamer.streamCalledToolsEvent(chatId, response.tool_calls) + const formattedToolCalls = response.tool_calls.map((toolCall: any) => ({ + tool: toolCall.name || 'tool', + toolInput: toolCall.args, + toolOutput: '' + })) + sseStreamer.streamCalledToolsEvent(chatId, flatten(formattedToolCalls)) } if (response.usage_metadata) { @@ -879,107 +857,6 @@ class LLM_Agentflow implements INode { sseStreamer.streamEndEvent(chatId) } - - /** - * Creates a Zod schema from a JSON schema object - * @param jsonSchema The JSON schema object - * @returns A Zod schema - */ - private createZodSchemaFromJSON(jsonSchema: any): z.ZodTypeAny { - // If the schema is an object with properties, create an object schema - if (typeof jsonSchema === 'object' && jsonSchema !== null) { - const schemaObj: Record = {} - - // Process each property in the schema - for (const [key, value] of Object.entries(jsonSchema)) { - if (value === null) { - // Handle null values - schemaObj[key] = z.null() - } else if (typeof value === 'object' && 
!Array.isArray(value)) { - // Check if the property has a type definition - if ('type' in value) { - const type = value.type as string - const description = ('description' in value ? (value.description as string) : '') || '' - - // Create the appropriate Zod type based on the type property - if (type === 'string') { - schemaObj[key] = z.string().describe(description) - } else if (type === 'number') { - schemaObj[key] = z.number().describe(description) - } else if (type === 'boolean') { - schemaObj[key] = z.boolean().describe(description) - } else if (type === 'array') { - // If it's an array type, check if items is defined - if ('items' in value && value.items) { - const itemSchema = this.createZodSchemaFromJSON(value.items) - schemaObj[key] = z.array(itemSchema).describe(description) - } else { - // Default to array of any if items not specified - schemaObj[key] = z.array(z.any()).describe(description) - } - } else if (type === 'object') { - // If it's an object type, check if properties is defined - if ('properties' in value && value.properties) { - const nestedSchema = this.createZodSchemaFromJSON(value.properties) - schemaObj[key] = nestedSchema.describe(description) - } else { - // Default to record of any if properties not specified - schemaObj[key] = z.record(z.any()).describe(description) - } - } else { - // Default to any for unknown types - schemaObj[key] = z.any().describe(description) - } - - // Check if the property is optional - if ('optional' in value && value.optional === true) { - schemaObj[key] = schemaObj[key].optional() - } - } else if (Array.isArray(value)) { - // Array values without a type property - if (value.length > 0) { - // If the array has items, recursively create a schema for the first item - const itemSchema = this.createZodSchemaFromJSON(value[0]) - schemaObj[key] = z.array(itemSchema) - } else { - // Empty array, allow any array - schemaObj[key] = z.array(z.any()) - } - } else { - // It's a nested object without a type property, 
recursively create schema - schemaObj[key] = this.createZodSchemaFromJSON(value) - } - } else if (Array.isArray(value)) { - // Array values - if (value.length > 0) { - // If the array has items, recursively create a schema for the first item - const itemSchema = this.createZodSchemaFromJSON(value[0]) - schemaObj[key] = z.array(itemSchema) - } else { - // Empty array, allow any array - schemaObj[key] = z.array(z.any()) - } - } else { - // For primitive values (which shouldn't be in the schema directly) - // Use the corresponding Zod type - if (typeof value === 'string') { - schemaObj[key] = z.string() - } else if (typeof value === 'number') { - schemaObj[key] = z.number() - } else if (typeof value === 'boolean') { - schemaObj[key] = z.boolean() - } else { - schemaObj[key] = z.any() - } - } - } - - return z.object(schemaObj) - } - - // Fallback to any for unknown types - return z.any() - } } module.exports = { nodeClass: LLM_Agentflow } diff --git a/packages/components/nodes/agentflow/Loop/Loop.ts b/packages/components/nodes/agentflow/Loop/Loop.ts index bc9d7b08d..edf7f5e1d 100644 --- a/packages/components/nodes/agentflow/Loop/Loop.ts +++ b/packages/components/nodes/agentflow/Loop/Loop.ts @@ -1,4 +1,5 @@ import { ICommonObject, INode, INodeData, INodeOptionsValue, INodeParams } from '../../../src/Interface' +import { updateFlowState } from '../utils' class Loop_Agentflow implements INode { label: string @@ -19,7 +20,7 @@ class Loop_Agentflow implements INode { constructor() { this.label = 'Loop' this.name = 'loopAgentflow' - this.version = 1.0 + this.version = 1.1 this.type = 'Loop' this.category = 'Agent Flows' this.description = 'Loop back to a previous node' @@ -40,6 +41,40 @@ class Loop_Agentflow implements INode { name: 'maxLoopCount', type: 'number', default: 5 + }, + { + label: 'Fallback Message', + name: 'fallbackMessage', + type: 'string', + description: 'Message to display if the loop count is exceeded', + placeholder: 'Enter your fallback message here', + 
rows: 4, + acceptVariable: true, + optional: true + }, + { + label: 'Update Flow State', + name: 'loopUpdateState', + description: 'Update runtime state during the execution of the workflow', + type: 'array', + optional: true, + acceptVariable: true, + array: [ + { + label: 'Key', + name: 'key', + type: 'asyncOptions', + loadMethod: 'listRuntimeStateKeys', + freeSolo: true + }, + { + label: 'Value', + name: 'value', + type: 'string', + acceptVariable: true, + acceptNodeOutputAsVariable: true + } + ] } ] } @@ -58,12 +93,20 @@ class Loop_Agentflow implements INode { }) } return returnOptions + }, + async listRuntimeStateKeys(_: INodeData, options: ICommonObject): Promise { + const previousNodes = options.previousNodes as ICommonObject[] + const startAgentflowNode = previousNodes.find((node) => node.name === 'startAgentflow') + const state = startAgentflowNode?.inputs?.startState as ICommonObject[] + return state.map((item) => ({ label: item.key, name: item.key })) } } async run(nodeData: INodeData, _: string, options: ICommonObject): Promise { const loopBackToNode = nodeData.inputs?.loopBackToNode as string const _maxLoopCount = nodeData.inputs?.maxLoopCount as string + const fallbackMessage = nodeData.inputs?.fallbackMessage as string + const _loopUpdateState = nodeData.inputs?.loopUpdateState const state = options.agentflowRuntime?.state as ICommonObject @@ -75,16 +118,34 @@ class Loop_Agentflow implements INode { maxLoopCount: _maxLoopCount ? 
parseInt(_maxLoopCount) : 5 } + const finalOutput = 'Loop back to ' + `${loopBackToNodeLabel} (${loopBackToNodeId})` + + // Update flow state if needed + let newState = { ...state } + if (_loopUpdateState && Array.isArray(_loopUpdateState) && _loopUpdateState.length > 0) { + newState = updateFlowState(state, _loopUpdateState) + } + + // Process template variables in state + if (newState && Object.keys(newState).length > 0) { + for (const key in newState) { + if (newState[key].toString().includes('{{ output }}')) { + newState[key] = finalOutput + } + } + } + const returnOutput = { id: nodeData.id, name: this.name, input: data, output: { - content: 'Loop back to ' + `${loopBackToNodeLabel} (${loopBackToNodeId})`, + content: finalOutput, nodeID: loopBackToNodeId, - maxLoopCount: _maxLoopCount ? parseInt(_maxLoopCount) : 5 + maxLoopCount: _maxLoopCount ? parseInt(_maxLoopCount) : 5, + fallbackMessage }, - state + state: newState } return returnOutput diff --git a/packages/components/nodes/agentflow/Retriever/Retriever.ts b/packages/components/nodes/agentflow/Retriever/Retriever.ts index 68420484e..e7ce426c2 100644 --- a/packages/components/nodes/agentflow/Retriever/Retriever.ts +++ b/packages/components/nodes/agentflow/Retriever/Retriever.ts @@ -8,6 +8,7 @@ import { IServerSideEventStreamer } from '../../../src/Interface' import { updateFlowState } from '../utils' +import { processTemplateVariables } from '../../../src/utils' import { DataSource } from 'typeorm' import { BaseRetriever } from '@langchain/core/retrievers' import { Document } from '@langchain/core/documents' @@ -119,7 +120,8 @@ class Retriever_Agentflow implements INode { return returnData } - const stores = await appDataSource.getRepository(databaseEntities['DocumentStore']).find() + const searchOptions = options.searchOptions || {} + const stores = await appDataSource.getRepository(databaseEntities['DocumentStore']).findBy(searchOptions) for (const store of stores) { if (store.status === 'UPSERTED') { 
const obj = { @@ -196,14 +198,7 @@ class Retriever_Agentflow implements INode { sseStreamer.streamTokenEvent(chatId, finalOutput) } - // Process template variables in state - if (newState && Object.keys(newState).length > 0) { - for (const key in newState) { - if (newState[key].toString().includes('{{ output }}')) { - newState[key] = finalOutput - } - } - } + newState = processTemplateVariables(newState, finalOutput) const returnOutput = { id: nodeData.id, diff --git a/packages/components/nodes/agentflow/Start/Start.ts b/packages/components/nodes/agentflow/Start/Start.ts index 5f6bf8449..833e3b7c2 100644 --- a/packages/components/nodes/agentflow/Start/Start.ts +++ b/packages/components/nodes/agentflow/Start/Start.ts @@ -18,7 +18,7 @@ class Start_Agentflow implements INode { constructor() { this.label = 'Start' this.name = 'startAgentflow' - this.version = 1.0 + this.version = 1.1 this.type = 'Start' this.category = 'Agent Flows' this.description = 'Starting point of the agentflow' @@ -153,6 +153,13 @@ class Start_Agentflow implements INode { optional: true } ] + }, + { + label: 'Persist State', + name: 'startPersistState', + type: 'boolean', + description: 'Persist the state in the same session', + optional: true } ] } @@ -161,6 +168,7 @@ class Start_Agentflow implements INode { const _flowState = nodeData.inputs?.startState as string const startInputType = nodeData.inputs?.startInputType as string const startEphemeralMemory = nodeData.inputs?.startEphemeralMemory as boolean + const startPersistState = nodeData.inputs?.startPersistState as boolean let flowStateArray = [] if (_flowState) { @@ -176,6 +184,13 @@ class Start_Agentflow implements INode { flowState[state.key] = state.value } + const runtimeState = options.agentflowRuntime?.state as ICommonObject + if (startPersistState === true && runtimeState && Object.keys(runtimeState).length) { + for (const state in runtimeState) { + flowState[state] = runtimeState[state] + } + } + const inputData: ICommonObject = {} 
const outputData: ICommonObject = {} @@ -202,6 +217,10 @@ class Start_Agentflow implements INode { outputData.ephemeralMemory = true } + if (startPersistState) { + outputData.persistState = true + } + const returnOutput = { id: nodeData.id, name: this.name, diff --git a/packages/components/nodes/agentflow/Tool/Tool.ts b/packages/components/nodes/agentflow/Tool/Tool.ts index c3945ff3e..300aaafa1 100644 --- a/packages/components/nodes/agentflow/Tool/Tool.ts +++ b/packages/components/nodes/agentflow/Tool/Tool.ts @@ -1,7 +1,8 @@ import { ICommonObject, INode, INodeData, INodeOptionsValue, INodeParams, IServerSideEventStreamer } from '../../../src/Interface' import { updateFlowState } from '../utils' +import { processTemplateVariables } from '../../../src/utils' import { Tool } from '@langchain/core/tools' -import { ARTIFACTS_PREFIX } from '../../../src/agents' +import { ARTIFACTS_PREFIX, TOOL_ARGS_PREFIX } from '../../../src/agents' import zodToJsonSchema from 'zod-to-json-schema' interface IToolInputArgs { @@ -28,7 +29,7 @@ class Tool_Agentflow implements INode { constructor() { this.label = 'Tool' this.name = 'toolAgentflow' - this.version = 1.0 + this.version = 1.1 this.type = 'Tool' this.category = 'Agent Flows' this.description = 'Tools allow LLM to interact with external systems' @@ -37,7 +38,7 @@ class Tool_Agentflow implements INode { this.inputs = [ { label: 'Tool', - name: 'selectedTool', + name: 'toolAgentflowSelectedTool', type: 'asyncOptions', loadMethod: 'listTools', loadConfig: true @@ -64,7 +65,7 @@ class Tool_Agentflow implements INode { } ], show: { - selectedTool: '.+' + toolAgentflowSelectedTool: '.+' } }, { @@ -124,8 +125,11 @@ class Tool_Agentflow implements INode { }, async listToolInputArgs(nodeData: INodeData, options: ICommonObject): Promise { const currentNode = options.currentNode as ICommonObject - const selectedTool = currentNode?.inputs?.selectedTool as string - const selectedToolConfig = currentNode?.inputs?.selectedToolConfig as 
ICommonObject + const selectedTool = (currentNode?.inputs?.selectedTool as string) || (currentNode?.inputs?.toolAgentflowSelectedTool as string) + const selectedToolConfig = + (currentNode?.inputs?.selectedToolConfig as ICommonObject) || + (currentNode?.inputs?.toolAgentflowSelectedToolConfig as ICommonObject) || + {} const nodeInstanceFilePath = options.componentNodes[selectedTool].filePath as string @@ -158,7 +162,7 @@ class Tool_Agentflow implements INode { toolInputArgs = { properties: allProperties } } else { // Handle single tool instance - toolInputArgs = toolInstance.schema ? zodToJsonSchema(toolInstance.schema) : {} + toolInputArgs = toolInstance.schema ? zodToJsonSchema(toolInstance.schema as any) : {} } if (toolInputArgs && Object.keys(toolInputArgs).length > 0) { @@ -183,8 +187,11 @@ class Tool_Agentflow implements INode { } async run(nodeData: INodeData, input: string, options: ICommonObject): Promise { - const selectedTool = nodeData.inputs?.selectedTool as string - const selectedToolConfig = nodeData.inputs?.selectedToolConfig as ICommonObject + const selectedTool = (nodeData.inputs?.selectedTool as string) || (nodeData.inputs?.toolAgentflowSelectedTool as string) + const selectedToolConfig = + (nodeData?.inputs?.selectedToolConfig as ICommonObject) || + (nodeData?.inputs?.toolAgentflowSelectedToolConfig as ICommonObject) || + {} const toolInputArgs = nodeData.inputs?.toolInputArgs as IToolInputArgs[] const _toolUpdateState = nodeData.inputs?.toolUpdateState @@ -220,13 +227,55 @@ class Tool_Agentflow implements INode { const toolInstance = (await newToolNodeInstance.init(newNodeData, '', options)) as Tool | Tool[] let toolCallArgs: Record = {} + + const parseInputValue = (value: string): any => { + if (typeof value !== 'string') { + return value + } + + // Remove escape characters (backslashes before special characters) + // ex: \["a", "b", "c", "d", "e"\] + let cleanedValue = value + .replace(/\\"/g, '"') // \" -> " + .replace(/\\\\/g, '\\') // \\ 
-> \ + .replace(/\\\[/g, '[') // \[ -> [ + .replace(/\\\]/g, ']') // \] -> ] + .replace(/\\\{/g, '{') // \{ -> { + .replace(/\\\}/g, '}') // \} -> } + + // Try to parse as JSON if it looks like JSON/array + if ( + (cleanedValue.startsWith('[') && cleanedValue.endsWith(']')) || + (cleanedValue.startsWith('{') && cleanedValue.endsWith('}')) + ) { + try { + return JSON.parse(cleanedValue) + } catch (e) { + // If parsing fails, return the cleaned value + return cleanedValue + } + } + + return cleanedValue + } + + if (newToolNodeInstance.transformNodeInputsToToolArgs) { + const defaultParams = newToolNodeInstance.transformNodeInputsToToolArgs(newNodeData) + + toolCallArgs = { + ...defaultParams, + ...toolCallArgs + } + } + for (const item of toolInputArgs) { const variableName = item.inputArgName const variableValue = item.inputArgValue - toolCallArgs[variableName] = variableValue + toolCallArgs[variableName] = parseInputValue(variableValue) } const flowConfig = { + chatflowId: options.chatflowid, sessionId: options.sessionId, chatId: options.chatId, input: input, @@ -262,6 +311,17 @@ class Tool_Agentflow implements INode { } } + let toolInput + if (typeof toolOutput === 'string' && toolOutput.includes(TOOL_ARGS_PREFIX)) { + const [output, args] = toolOutput.split(TOOL_ARGS_PREFIX) + toolOutput = output + try { + toolInput = JSON.parse(args) + } catch (e) { + console.error('Error parsing tool input from tool:', e) + } + } + if (typeof toolOutput === 'object') { toolOutput = JSON.stringify(toolOutput, null, 2) } @@ -271,20 +331,13 @@ class Tool_Agentflow implements INode { sseStreamer.streamTokenEvent(chatId, toolOutput) } - // Process template variables in state - if (newState && Object.keys(newState).length > 0) { - for (const key in newState) { - if (newState[key].toString().includes('{{ output }}')) { - newState[key] = toolOutput - } - } - } + newState = processTemplateVariables(newState, toolOutput) const returnOutput = { id: nodeData.id, name: this.name, input: { - 
toolInputArgs: toolInputArgs, + toolInputArgs: toolInput ?? toolInputArgs, selectedTool: selectedTool }, output: { diff --git a/packages/components/nodes/agentflow/prompt.ts b/packages/components/nodes/agentflow/prompt.ts index a5d9cd893..ee941ae22 100644 --- a/packages/components/nodes/agentflow/prompt.ts +++ b/packages/components/nodes/agentflow/prompt.ts @@ -39,37 +39,38 @@ export const DEFAULT_HUMAN_INPUT_DESCRIPTION_HTML = `

Summarize the conversati ` -export const CONDITION_AGENT_SYSTEM_PROMPT = `You are part of a multi-agent system designed to make agent coordination and execution easy. Your task is to analyze the given input and select one matching scenario from a provided set of scenarios. If none of the scenarios match the input, you should return "default." - -- **Input**: A string representing the user's query or message. -- **Scenarios**: A list of predefined scenarios that relate to the input. -- **Instruction**: Determine if the input fits any of the scenarios. - -## Steps - -1. **Read the input string** and the list of scenarios. -2. **Analyze the content of the input** to identify its main topic or intention. -3. **Compare the input with each scenario**: - - If a scenario matches the main topic of the input, select that scenario. - - If no scenarios match, prepare to output "\`\`\`json\n{"output": "default"}\`\`\`" -4. **Output the result**: If a match is found, return the corresponding scenario in JSON; otherwise, return "\`\`\`json\n{"output": "default"}\`\`\`" - -## Output Format - -Output should be a JSON object that either names the matching scenario or returns "\`\`\`json\n{"output": "default"}\`\`\`" if no scenarios match. No explanation is needed. - -## Examples - -1. **Input**: {"input": "Hello", "scenarios": ["user is asking about AI", "default"], "instruction": "Your task is to check and see if user is asking topic about AI"} - **Output**: "\`\`\`json\n{"output": "default"}\`\`\`" - -2. **Input**: {"input": "What is AIGC?", "scenarios": ["user is asking about AI", "default"], "instruction": "Your task is to check and see if user is asking topic about AI"} - **Output**: "\`\`\`json\n{"output": "user is asking about AI"}\`\`\`" - -3. 
**Input**: {"input": "Can you explain deep learning?", "scenarios": ["user is interested in AI topics", "default"], "instruction": "Determine if the user is interested in learning about AI"} - **Output**: "\`\`\`json\n{"output": "user is interested in AI topics"}\`\`\`" - -## Note -- Ensure that the input scenarios align well with potential user queries for accurate matching -- DO NOT include anything other than the JSON in your response. -` +export const CONDITION_AGENT_SYSTEM_PROMPT = `

You are part of a multi-agent system designed to make agent coordination and execution easy. Your task is to analyze the given input and select one matching scenario from a provided set of scenarios.

+
    +
  • Input: A string representing the user's query, message or data.
  • +
  • Scenarios: A list of predefined scenarios that relate to the input.
  • +
  • Instruction: Determine which of the provided scenarios is the best fit for the input.
  • +
+

Steps

+
    +
  1. Read the input string and the list of scenarios.
  2. +
  3. Analyze the content of the input to identify its main topic or intention.
  4. +
  5. Compare the input with each scenario: Evaluate how well the input's topic or intention aligns with each of the provided scenarios and select the one that is the best fit.
  6. +
  7. Output the result: Return the selected scenario in the specified JSON format.
  8. +
+

Output Format

+

Output should be a JSON object that names the selected scenario, like this: {"output": ""}. No explanation is needed.

+

Examples

+
    +
  1. +

    Input: {"input": "Hello", "scenarios": ["user is asking about AI", "user is not asking about AI"], "instruction": "Your task is to check if the user is asking about AI."}

    +

    Output: {"output": "user is not asking about AI"}

    +
  2. +
  3. +

    Input: {"input": "What is AIGC?", "scenarios": ["user is asking about AI", "user is asking about the weather"], "instruction": "Your task is to check and see if the user is asking a topic about AI."}

    +

    Output: {"output": "user is asking about AI"}

    +
  4. +
  5. +

    Input: {"input": "Can you explain deep learning?", "scenarios": ["user is interested in AI topics", "user wants to order food"], "instruction": "Determine if the user is interested in learning about AI."}

    +

    Output: {"output": "user is interested in AI topics"}

    +
  6. +
+

Note

+
    +
  • Ensure that the input scenarios align well with potential user queries for accurate matching.
  • +
  • DO NOT include anything other than the JSON in your response.
  • +
` diff --git a/packages/components/nodes/agentflow/utils.ts b/packages/components/nodes/agentflow/utils.ts index 8891e74eb..14d832c8a 100644 --- a/packages/components/nodes/agentflow/utils.ts +++ b/packages/components/nodes/agentflow/utils.ts @@ -4,7 +4,7 @@ import { getFileFromStorage } from '../../src/storageUtils' import { ICommonObject, IFileUpload } from '../../src/Interface' import { BaseMessageLike } from '@langchain/core/messages' import { IFlowState } from './Interface.Agentflow' -import { mapMimeTypeToInputField } from '../../src/utils' +import { handleEscapeCharacters, mapMimeTypeToInputField } from '../../src/utils' export const addImagesToMessages = async ( options: ICommonObject, @@ -18,7 +18,7 @@ export const addImagesToMessages = async ( for (const upload of imageUploads) { let bf = upload.data if (upload.type == 'stored-file') { - const contents = await getFileFromStorage(upload.name, options.chatflowid, options.chatId) + const contents = await getFileFromStorage(upload.name, options.orgId, options.chatflowid, options.chatId) // as the image is stored in the server, read the file and convert it to base64 bf = 'data:' + upload.mime + ';base64,' + contents.toString('base64') @@ -90,7 +90,7 @@ export const processMessagesWithImages = async ( hasImageReferences = true try { // Get file contents from storage - const contents = await getFileFromStorage(item.name, options.chatflowid, options.chatId) + const contents = await getFileFromStorage(item.name, options.orgId, options.chatflowid, options.chatId) // Create base64 data URL const base64Data = 'data:' + item.mime + ';base64,' + contents.toString('base64') @@ -313,13 +313,16 @@ export const getPastChatHistoryImageMessages = async ( if (message.additional_kwargs && message.additional_kwargs.fileUploads) { // example: [{"type":"stored-file","name":"0_DiXc4ZklSTo3M8J4.jpg","mime":"image/jpeg"}] const fileUploads = message.additional_kwargs.fileUploads + const artifacts = 
message.additional_kwargs.artifacts + const fileAnnotations = message.additional_kwargs.fileAnnotations + const usedTools = message.additional_kwargs.usedTools try { let messageWithFileUploads = '' const uploads: IFileUpload[] = typeof fileUploads === 'string' ? JSON.parse(fileUploads) : fileUploads const imageContents: MessageContentImageUrl[] = [] for (const upload of uploads) { if (upload.type === 'stored-file' && upload.mime.startsWith('image/')) { - const fileData = await getFileFromStorage(upload.name, options.chatflowid, options.chatId) + const fileData = await getFileFromStorage(upload.name, options.orgId, options.chatflowid, options.chatId) // as the image is stored in the server, read the file and convert it to base64 const bf = 'data:' + upload.mime + ';base64,' + fileData.toString('base64') @@ -343,7 +346,8 @@ export const getPastChatHistoryImageMessages = async ( const nodeOptions = { retrieveAttachmentChatId: true, chatflowid: options.chatflowid, - chatId: options.chatId + chatId: options.chatId, + orgId: options.orgId } let fileInputFieldFromMimeType = 'txtFile' fileInputFieldFromMimeType = mapMimeTypeToInputField(upload.mime) @@ -353,26 +357,87 @@ export const getPastChatHistoryImageMessages = async ( } } const documents: string = await fileLoaderNodeInstance.init(nodeData, '', nodeOptions) - messageWithFileUploads += `${documents}\n\n` + messageWithFileUploads += `${handleEscapeCharacters(documents, true)}\n\n` } } const messageContent = messageWithFileUploads ? 
`${messageWithFileUploads}\n\n${message.content}` : message.content + const hasArtifacts = artifacts && Array.isArray(artifacts) && artifacts.length > 0 + const hasFileAnnotations = fileAnnotations && Array.isArray(fileAnnotations) && fileAnnotations.length > 0 + const hasUsedTools = usedTools && Array.isArray(usedTools) && usedTools.length > 0 + if (imageContents.length > 0) { - chatHistory.push({ + const imageMessage: any = { role: messageRole, content: imageContents - }) + } + if (hasArtifacts || hasFileAnnotations || hasUsedTools) { + imageMessage.additional_kwargs = {} + if (hasArtifacts) imageMessage.additional_kwargs.artifacts = artifacts + if (hasFileAnnotations) imageMessage.additional_kwargs.fileAnnotations = fileAnnotations + if (hasUsedTools) imageMessage.additional_kwargs.usedTools = usedTools + } + chatHistory.push(imageMessage) transformedPastMessages.push({ role: messageRole, content: [...JSON.parse((pastChatHistory[i] as any).additional_kwargs.fileUploads)] }) } - chatHistory.push({ + + const contentMessage: any = { role: messageRole, content: messageContent - }) + } + if (hasArtifacts || hasFileAnnotations || hasUsedTools) { + contentMessage.additional_kwargs = {} + if (hasArtifacts) contentMessage.additional_kwargs.artifacts = artifacts + if (hasFileAnnotations) contentMessage.additional_kwargs.fileAnnotations = fileAnnotations + if (hasUsedTools) contentMessage.additional_kwargs.usedTools = usedTools + } + chatHistory.push(contentMessage) } catch (e) { // failed to parse fileUploads, continue with text only + const hasArtifacts = artifacts && Array.isArray(artifacts) && artifacts.length > 0 + const hasFileAnnotations = fileAnnotations && Array.isArray(fileAnnotations) && fileAnnotations.length > 0 + const hasUsedTools = usedTools && Array.isArray(usedTools) && usedTools.length > 0 + + const errorMessage: any = { + role: messageRole, + content: message.content + } + if (hasArtifacts || hasFileAnnotations || hasUsedTools) { + 
errorMessage.additional_kwargs = {} + if (hasArtifacts) errorMessage.additional_kwargs.artifacts = artifacts + if (hasFileAnnotations) errorMessage.additional_kwargs.fileAnnotations = fileAnnotations + if (hasUsedTools) errorMessage.additional_kwargs.usedTools = usedTools + } + chatHistory.push(errorMessage) + } + } else if (message.additional_kwargs) { + const hasArtifacts = + message.additional_kwargs.artifacts && + Array.isArray(message.additional_kwargs.artifacts) && + message.additional_kwargs.artifacts.length > 0 + const hasFileAnnotations = + message.additional_kwargs.fileAnnotations && + Array.isArray(message.additional_kwargs.fileAnnotations) && + message.additional_kwargs.fileAnnotations.length > 0 + const hasUsedTools = + message.additional_kwargs.usedTools && + Array.isArray(message.additional_kwargs.usedTools) && + message.additional_kwargs.usedTools.length > 0 + + if (hasArtifacts || hasFileAnnotations || hasUsedTools) { + const messageAdditionalKwargs: any = {} + if (hasArtifacts) messageAdditionalKwargs.artifacts = message.additional_kwargs.artifacts + if (hasFileAnnotations) messageAdditionalKwargs.fileAnnotations = message.additional_kwargs.fileAnnotations + if (hasUsedTools) messageAdditionalKwargs.usedTools = message.additional_kwargs.usedTools + + chatHistory.push({ + role: messageRole, + content: message.content, + additional_kwargs: messageAdditionalKwargs + }) + } else { chatHistory.push({ role: messageRole, content: message.content @@ -394,9 +459,9 @@ export const getPastChatHistoryImageMessages = async ( /** * Updates the flow state with new values */ -export const updateFlowState = (state: ICommonObject, llmUpdateState: IFlowState[]): ICommonObject => { +export const updateFlowState = (state: ICommonObject, updateState: IFlowState[]): ICommonObject => { let newFlowState: Record = {} - for (const state of llmUpdateState) { + for (const state of updateState) { newFlowState[state.key] = state.value } diff --git 
a/packages/components/nodes/agents/AirtableAgent/AirtableAgent.ts b/packages/components/nodes/agents/AirtableAgent/AirtableAgent.ts index d61ffd4be..88c1c5bb8 100644 --- a/packages/components/nodes/agents/AirtableAgent/AirtableAgent.ts +++ b/packages/components/nodes/agents/AirtableAgent/AirtableAgent.ts @@ -128,7 +128,7 @@ class Airtable_Agents implements INode { let base64String = Buffer.from(JSON.stringify(airtableData)).toString('base64') - const loggerHandler = new ConsoleCallbackHandler(options.logger) + const loggerHandler = new ConsoleCallbackHandler(options.logger, options?.orgId) const callbacks = await additionalCallbacks(nodeData, options) const pyodide = await LoadPyodide() @@ -163,7 +163,7 @@ json.dumps(my_dict)` const chain = new LLMChain({ llm: model, prompt: PromptTemplate.fromTemplate(systemPrompt), - verbose: process.env.DEBUG === 'true' + verbose: process.env.DEBUG === 'true' ? true : false }) const inputs = { dict: dataframeColDict, @@ -183,7 +183,7 @@ json.dumps(my_dict)` // TODO: get print console output finalResult = await pyodide.runPythonAsync(code) } catch (error) { - throw new Error(`Sorry, I'm unable to find answer for question: "${input}" using follwoing code: "${pythonCode}"`) + throw new Error(`Sorry, I'm unable to find answer for question: "${input}" using following code: "${pythonCode}"`) } } @@ -192,7 +192,7 @@ json.dumps(my_dict)` const chain = new LLMChain({ llm: model, prompt: PromptTemplate.fromTemplate(finalSystemPrompt), - verbose: process.env.DEBUG === 'true' + verbose: process.env.DEBUG === 'true' ? 
true : false }) const inputs = { question: input, diff --git a/packages/components/nodes/agents/AutoGPT/AutoGPT.ts b/packages/components/nodes/agents/AutoGPT/AutoGPT.ts index c41a52965..04fd8e926 100644 --- a/packages/components/nodes/agents/AutoGPT/AutoGPT.ts +++ b/packages/components/nodes/agents/AutoGPT/AutoGPT.ts @@ -23,6 +23,7 @@ class AutoGPT_Agents implements INode { category: string baseClasses: string[] inputs: INodeParams[] + badge: string constructor() { this.label = 'AutoGPT' @@ -30,6 +31,7 @@ class AutoGPT_Agents implements INode { this.version = 2.0 this.type = 'AutoGPT' this.category = 'Agents' + this.badge = 'DEPRECATING' this.icon = 'autogpt.svg' this.description = 'Autonomous agent with chain of thoughts for self-guided task completion' this.baseClasses = ['AutoGPT'] diff --git a/packages/components/nodes/agents/BabyAGI/BabyAGI.ts b/packages/components/nodes/agents/BabyAGI/BabyAGI.ts index 87d5cd289..d3bad9039 100644 --- a/packages/components/nodes/agents/BabyAGI/BabyAGI.ts +++ b/packages/components/nodes/agents/BabyAGI/BabyAGI.ts @@ -15,6 +15,7 @@ class BabyAGI_Agents implements INode { category: string baseClasses: string[] inputs: INodeParams[] + badge: string constructor() { this.label = 'BabyAGI' @@ -23,6 +24,7 @@ class BabyAGI_Agents implements INode { this.type = 'BabyAGI' this.category = 'Agents' this.icon = 'babyagi.svg' + this.badge = 'DEPRECATING' this.description = 'Task Driven Autonomous Agent which creates new task and reprioritizes task list based on objective' this.baseClasses = ['BabyAGI'] this.inputs = [ diff --git a/packages/components/nodes/agents/CSVAgent/CSVAgent.ts b/packages/components/nodes/agents/CSVAgent/CSVAgent.ts index fbe85afc7..b94d91ad1 100644 --- a/packages/components/nodes/agents/CSVAgent/CSVAgent.ts +++ b/packages/components/nodes/agents/CSVAgent/CSVAgent.ts @@ -97,7 +97,7 @@ class CSV_Agents implements INode { } } - const loggerHandler = new ConsoleCallbackHandler(options.logger) + const loggerHandler = new 
ConsoleCallbackHandler(options.logger, options?.orgId) const shouldStreamResponse = options.shouldStreamResponse const sseStreamer: IServerSideEventStreamer = options.sseStreamer as IServerSideEventStreamer const chatId = options.chatId @@ -114,11 +114,12 @@ class CSV_Agents implements INode { } else { files = [fileName] } + const orgId = options.orgId const chatflowid = options.chatflowid for (const file of files) { if (!file) continue - const fileData = await getFileFromStorage(file, chatflowid) + const fileData = await getFileFromStorage(file, orgId, chatflowid) base64String += fileData.toString('base64') } } else { @@ -170,7 +171,7 @@ json.dumps(my_dict)` const chain = new LLMChain({ llm: model, prompt: PromptTemplate.fromTemplate(systemPrompt), - verbose: process.env.DEBUG === 'true' + verbose: process.env.DEBUG === 'true' ? true : false }) const inputs = { dict: dataframeColDict, @@ -201,7 +202,7 @@ json.dumps(my_dict)` prompt: PromptTemplate.fromTemplate( systemMessagePrompt ? `${systemMessagePrompt}\n${finalSystemPrompt}` : finalSystemPrompt ), - verbose: process.env.DEBUG === 'true' + verbose: process.env.DEBUG === 'true' ? 
true : false }) const inputs = { question: input, diff --git a/packages/components/nodes/agents/ConversationalAgent/ConversationalAgent.ts b/packages/components/nodes/agents/ConversationalAgent/ConversationalAgent.ts index 4a5d91087..8583826da 100644 --- a/packages/components/nodes/agents/ConversationalAgent/ConversationalAgent.ts +++ b/packages/components/nodes/agents/ConversationalAgent/ConversationalAgent.ts @@ -132,7 +132,7 @@ class ConversationalAgent_Agents implements INode { } const executor = await prepareAgent(nodeData, options, { sessionId: this.sessionId, chatId: options.chatId, input }) - const loggerHandler = new ConsoleCallbackHandler(options.logger) + const loggerHandler = new ConsoleCallbackHandler(options.logger, options?.orgId) const callbacks = await additionalCallbacks(nodeData, options) let res: ChainValues = {} diff --git a/packages/components/nodes/agents/ConversationalRetrievalToolAgent/ConversationalRetrievalToolAgent.ts b/packages/components/nodes/agents/ConversationalRetrievalToolAgent/ConversationalRetrievalToolAgent.ts index 54698ca13..7a8966e14 100644 --- a/packages/components/nodes/agents/ConversationalRetrievalToolAgent/ConversationalRetrievalToolAgent.ts +++ b/packages/components/nodes/agents/ConversationalRetrievalToolAgent/ConversationalRetrievalToolAgent.ts @@ -5,7 +5,7 @@ import { RunnableSequence } from '@langchain/core/runnables' import { BaseChatModel } from '@langchain/core/language_models/chat_models' import { ChatPromptTemplate, MessagesPlaceholder, HumanMessagePromptTemplate, PromptTemplate } from '@langchain/core/prompts' import { formatToOpenAIToolMessages } from 'langchain/agents/format_scratchpad/openai_tools' -import { getBaseClasses, transformBracesWithColon } from '../../../src/utils' +import { getBaseClasses, transformBracesWithColon, convertChatHistoryToText, convertBaseMessagetoIMessage } from '../../../src/utils' import { type ToolsAgentStep } from 'langchain/agents/openai/output_parser' import { FlowiseMemory, 
@@ -23,8 +23,10 @@ import { Moderation, checkInputs, streamResponse } from '../../moderation/Modera import { formatResponse } from '../../outputparsers/OutputParserHelpers' import type { Document } from '@langchain/core/documents' import { BaseRetriever } from '@langchain/core/retrievers' -import { RESPONSE_TEMPLATE } from '../../chains/ConversationalRetrievalQAChain/prompts' +import { RESPONSE_TEMPLATE, REPHRASE_TEMPLATE } from '../../chains/ConversationalRetrievalQAChain/prompts' import { addImagesToMessages, llmSupportsVision } from '../../../src/multiModalUtils' +import { StringOutputParser } from '@langchain/core/output_parsers' +import { Tool } from '@langchain/core/tools' class ConversationalRetrievalToolAgent_Agents implements INode { label: string @@ -42,7 +44,7 @@ class ConversationalRetrievalToolAgent_Agents implements INode { constructor(fields?: { sessionId?: string }) { this.label = 'Conversational Retrieval Tool Agent' this.name = 'conversationalRetrievalToolAgent' - this.author = 'niztal(falkor)' + this.author = 'niztal(falkor) and nikitas-novatix' this.version = 1.0 this.type = 'AgentExecutor' this.category = 'Agents' @@ -79,6 +81,26 @@ class ConversationalRetrievalToolAgent_Agents implements INode { optional: true, default: RESPONSE_TEMPLATE }, + { + label: 'Rephrase Prompt', + name: 'rephrasePrompt', + type: 'string', + description: 'Using previous chat history, rephrase question into a standalone question', + warning: 'Prompt must include input variables: {chat_history} and {question}', + rows: 4, + additionalParams: true, + optional: true, + default: REPHRASE_TEMPLATE + }, + { + label: 'Rephrase Model', + name: 'rephraseModel', + type: 'BaseChatModel', + description: + 'Optional: Use a different (faster/cheaper) model for rephrasing. 
If not specified, uses the main Tool Calling Chat Model.', + optional: true, + additionalParams: true + }, { label: 'Input Moderation', description: 'Detect text that could generate harmful output and prevent it from being sent to the language model', @@ -103,8 +125,9 @@ class ConversationalRetrievalToolAgent_Agents implements INode { this.sessionId = fields?.sessionId } - async init(nodeData: INodeData, input: string, options: ICommonObject): Promise { - return prepareAgent(nodeData, options, { sessionId: this.sessionId, chatId: options.chatId, input }) + // The agent will be prepared in run() with the correct user message - it needs the actual runtime input for rephrasing + async init(_nodeData: INodeData, _input: string, _options: ICommonObject): Promise { + return null } async run(nodeData: INodeData, input: string, options: ICommonObject): Promise { @@ -130,7 +153,7 @@ class ConversationalRetrievalToolAgent_Agents implements INode { const executor = await prepareAgent(nodeData, options, { sessionId: this.sessionId, chatId: options.chatId, input }) - const loggerHandler = new ConsoleCallbackHandler(options.logger) + const loggerHandler = new ConsoleCallbackHandler(options.logger, options?.orgId) const callbacks = await additionalCallbacks(nodeData, options) let res: ChainValues = {} @@ -148,6 +171,23 @@ class ConversationalRetrievalToolAgent_Agents implements INode { sseStreamer.streamUsedToolsEvent(chatId, res.usedTools) usedTools = res.usedTools } + + // If the tool is set to returnDirect, stream the output to the client + if (res.usedTools && res.usedTools.length) { + let inputTools = nodeData.inputs?.tools + inputTools = flatten(inputTools) + for (const tool of res.usedTools) { + const inputTool = inputTools.find((inputTool: Tool) => inputTool.name === tool.tool) + if (inputTool && (inputTool as any).returnDirect && shouldStreamResponse) { + sseStreamer.streamTokenEvent(chatId, tool.toolOutput) + // Prevent CustomChainHandler from streaming the same output 
again + if (res.output === tool.toolOutput) { + res.output = '' + } + } + } + } + // The CustomChainHandler will send the stream end event } else { res = await executor.invoke({ input }, { callbacks: [loggerHandler, ...callbacks] }) if (res.sourceDocuments) { @@ -210,9 +250,11 @@ const prepareAgent = async ( flowObj: { sessionId?: string; chatId?: string; input?: string } ) => { const model = nodeData.inputs?.model as BaseChatModel + const rephraseModel = (nodeData.inputs?.rephraseModel as BaseChatModel) || model // Use main model if not specified const maxIterations = nodeData.inputs?.maxIterations as string const memory = nodeData.inputs?.memory as FlowiseMemory let systemMessage = nodeData.inputs?.systemMessage as string + let rephrasePrompt = nodeData.inputs?.rephrasePrompt as string let tools = nodeData.inputs?.tools tools = flatten(tools) const memoryKey = memory.memoryKey ? memory.memoryKey : 'chat_history' @@ -220,6 +262,9 @@ const prepareAgent = async ( const vectorStoreRetriever = nodeData.inputs?.vectorStoreRetriever as BaseRetriever systemMessage = transformBracesWithColon(systemMessage) + if (rephrasePrompt) { + rephrasePrompt = transformBracesWithColon(rephrasePrompt) + } const prompt = ChatPromptTemplate.fromMessages([ ['system', systemMessage ? 
systemMessage : `You are a helpful AI assistant.`], @@ -263,6 +308,37 @@ const prepareAgent = async ( const modelWithTools = model.bindTools(tools) + // Function to get standalone question (either rephrased or original) + const getStandaloneQuestion = async (input: string): Promise => { + // If no rephrase prompt, return the original input + if (!rephrasePrompt) { + return input + } + + // Get chat history (use empty string if none) + const messages = (await memory.getChatMessages(flowObj?.sessionId, true)) as BaseMessage[] + const iMessages = convertBaseMessagetoIMessage(messages) + const chatHistoryString = convertChatHistoryToText(iMessages) + + // Always rephrase to normalize/expand user queries for better retrieval + try { + const CONDENSE_QUESTION_PROMPT = PromptTemplate.fromTemplate(rephrasePrompt) + const condenseQuestionChain = RunnableSequence.from([CONDENSE_QUESTION_PROMPT, rephraseModel, new StringOutputParser()]) + const res = await condenseQuestionChain.invoke({ + question: input, + chat_history: chatHistoryString + }) + return res + } catch (error) { + console.error('Error rephrasing question:', error) + // On error, fall back to original input + return input + } + } + + // Get standalone question before creating runnable + const standaloneQuestion = await getStandaloneQuestion(flowObj?.input || '') + const runnableAgent = RunnableSequence.from([ { [inputKey]: (i: { input: string; steps: ToolsAgentStep[] }) => i.input, @@ -272,7 +348,9 @@ const prepareAgent = async ( return messages ?? 
[] }, context: async (i: { input: string; chatHistory?: string }) => { - const relevantDocs = await vectorStoreRetriever.invoke(i.input) + // Use the standalone question (rephrased or original) for retrieval + const retrievalQuery = standaloneQuestion || i.input + const relevantDocs = await vectorStoreRetriever.invoke(retrievalQuery) const formattedDocs = formatDocs(relevantDocs) return formattedDocs } @@ -288,11 +366,13 @@ const prepareAgent = async ( sessionId: flowObj?.sessionId, chatId: flowObj?.chatId, input: flowObj?.input, - verbose: process.env.DEBUG === 'true', + verbose: process.env.DEBUG === 'true' ? true : false, maxIterations: maxIterations ? parseFloat(maxIterations) : undefined }) return executor } -module.exports = { nodeClass: ConversationalRetrievalToolAgent_Agents } +module.exports = { + nodeClass: ConversationalRetrievalToolAgent_Agents +} diff --git a/packages/components/nodes/agents/LlamaIndexAgents/AnthropicAgent/AnthropicAgent_LlamaIndex.ts b/packages/components/nodes/agents/LlamaIndexAgents/AnthropicAgent/AnthropicAgent_LlamaIndex.ts index c218ff654..257250020 100644 --- a/packages/components/nodes/agents/LlamaIndexAgents/AnthropicAgent/AnthropicAgent_LlamaIndex.ts +++ b/packages/components/nodes/agents/LlamaIndexAgents/AnthropicAgent/AnthropicAgent_LlamaIndex.ts @@ -2,6 +2,7 @@ import { flatten } from 'lodash' import { MessageContentTextDetail, ChatMessage, AnthropicAgent, Anthropic } from 'llamaindex' import { getBaseClasses } from '../../../../src/utils' import { FlowiseMemory, ICommonObject, IMessage, INode, INodeData, INodeParams, IUsedTool } from '../../../../src/Interface' +import { EvaluationRunTracerLlama } from '../../../../evaluation/EvaluationRunTracerLlama' class AnthropicAgent_LlamaIndex_Agents implements INode { label: string @@ -96,13 +97,16 @@ class AnthropicAgent_LlamaIndex_Agents implements INode { tools, llm: model, chatHistory: chatHistory, - verbose: process.env.DEBUG === 'true' + verbose: process.env.DEBUG === 'true' 
? true : false }) + // these are needed for evaluation runs + await EvaluationRunTracerLlama.injectEvaluationMetadata(nodeData, options, agent) + let text = '' const usedTools: IUsedTool[] = [] - const response = await agent.chat({ message: input, chatHistory, verbose: process.env.DEBUG === 'true' }) + const response = await agent.chat({ message: input, chatHistory, verbose: process.env.DEBUG === 'true' ? true : false }) if (response.sources.length) { for (const sourceTool of response.sources) { diff --git a/packages/components/nodes/agents/LlamaIndexAgents/OpenAIToolAgent/OpenAIToolAgent_LlamaIndex.ts b/packages/components/nodes/agents/LlamaIndexAgents/OpenAIToolAgent/OpenAIToolAgent_LlamaIndex.ts index 07b2578bd..657fed6bf 100644 --- a/packages/components/nodes/agents/LlamaIndexAgents/OpenAIToolAgent/OpenAIToolAgent_LlamaIndex.ts +++ b/packages/components/nodes/agents/LlamaIndexAgents/OpenAIToolAgent/OpenAIToolAgent_LlamaIndex.ts @@ -1,6 +1,7 @@ import { flatten } from 'lodash' import { ChatMessage, OpenAI, OpenAIAgent } from 'llamaindex' import { getBaseClasses } from '../../../../src/utils' +import { EvaluationRunTracerLlama } from '../../../../evaluation/EvaluationRunTracerLlama' import { FlowiseMemory, ICommonObject, @@ -107,9 +108,12 @@ class OpenAIFunctionAgent_LlamaIndex_Agents implements INode { tools, llm: model, chatHistory: chatHistory, - verbose: process.env.DEBUG === 'true' + verbose: process.env.DEBUG === 'true' ? true : false }) + // these are needed for evaluation runs + await EvaluationRunTracerLlama.injectEvaluationMetadata(nodeData, options, agent) + let text = '' let isStreamingStarted = false const usedTools: IUsedTool[] = [] @@ -119,10 +123,9 @@ class OpenAIFunctionAgent_LlamaIndex_Agents implements INode { message: input, chatHistory, stream: true, - verbose: process.env.DEBUG === 'true' + verbose: process.env.DEBUG === 'true' ? 
true : false }) for await (const chunk of stream) { - //console.log('chunk', chunk) text += chunk.response.delta if (!isStreamingStarted) { isStreamingStarted = true @@ -147,7 +150,7 @@ class OpenAIFunctionAgent_LlamaIndex_Agents implements INode { } } } else { - const response = await agent.chat({ message: input, chatHistory, verbose: process.env.DEBUG === 'true' }) + const response = await agent.chat({ message: input, chatHistory, verbose: process.env.DEBUG === 'true' ? true : false }) if (response.sources.length) { for (const sourceTool of response.sources) { usedTools.push({ diff --git a/packages/components/nodes/agents/OpenAIAssistant/OpenAIAssistant.ts b/packages/components/nodes/agents/OpenAIAssistant/OpenAIAssistant.ts index f8886983d..e87745492 100644 --- a/packages/components/nodes/agents/OpenAIAssistant/OpenAIAssistant.ts +++ b/packages/components/nodes/agents/OpenAIAssistant/OpenAIAssistant.ts @@ -107,7 +107,11 @@ class OpenAIAssistant_Agents implements INode { return returnData } - const assistants = await appDataSource.getRepository(databaseEntities['Assistant']).find() + const searchOptions = options.searchOptions || {} + const assistants = await appDataSource.getRepository(databaseEntities['Assistant']).findBy({ + ...searchOptions, + type: 'OPENAI' + }) for (let i = 0; i < assistants.length; i += 1) { const assistantDetails = JSON.parse(assistants[i].details) @@ -130,13 +134,14 @@ class OpenAIAssistant_Agents implements INode { const selectedAssistantId = nodeData.inputs?.selectedAssistant as string const appDataSource = options.appDataSource as DataSource const databaseEntities = options.databaseEntities as IDatabaseEntity + const orgId = options.orgId const assistant = await appDataSource.getRepository(databaseEntities['Assistant']).findOneBy({ id: selectedAssistantId }) if (!assistant) { - options.logger.error(`Assistant ${selectedAssistantId} not found`) + options.logger.error(`[${orgId}]: Assistant ${selectedAssistantId} not found`) return } @@ 
-149,7 +154,7 @@ class OpenAIAssistant_Agents implements INode { chatId }) if (!chatmsg) { - options.logger.error(`Chat Message with Chat Id: ${chatId} not found`) + options.logger.error(`[${orgId}]: Chat Message with Chat Id: ${chatId} not found`) return } sessionId = chatmsg.sessionId @@ -160,21 +165,21 @@ class OpenAIAssistant_Agents implements INode { const credentialData = await getCredentialData(assistant.credential ?? '', options) const openAIApiKey = getCredentialParam('openAIApiKey', credentialData, nodeData) if (!openAIApiKey) { - options.logger.error(`OpenAI ApiKey not found`) + options.logger.error(`[${orgId}]: OpenAI ApiKey not found`) return } const openai = new OpenAI({ apiKey: openAIApiKey }) - options.logger.info(`Clearing OpenAI Thread ${sessionId}`) + options.logger.info(`[${orgId}]: Clearing OpenAI Thread ${sessionId}`) try { if (sessionId && sessionId.startsWith('thread_')) { await openai.beta.threads.del(sessionId) - options.logger.info(`Successfully cleared OpenAI Thread ${sessionId}`) + options.logger.info(`[${orgId}]: Successfully cleared OpenAI Thread ${sessionId}`) } else { - options.logger.error(`Error clearing OpenAI Thread ${sessionId}`) + options.logger.error(`[${orgId}]: Error clearing OpenAI Thread ${sessionId}`) } } catch (e) { - options.logger.error(`Error clearing OpenAI Thread ${sessionId}`) + options.logger.error(`[${orgId}]: Error clearing OpenAI Thread ${sessionId}`) } } @@ -190,6 +195,17 @@ class OpenAIAssistant_Agents implements INode { const shouldStreamResponse = options.shouldStreamResponse const sseStreamer: IServerSideEventStreamer = options.sseStreamer as IServerSideEventStreamer const chatId = options.chatId + const checkStorage = options.checkStorage + ? (options.checkStorage as (orgId: string, subscriptionId: string, usageCacheManager: any) => Promise) + : undefined + const updateStorageUsage = options.updateStorageUsage + ? 
(options.updateStorageUsage as ( + orgId: string, + workspaceId: string, + totalSize: number, + usageCacheManager: any + ) => Promise) + : undefined if (moderations && moderations.length > 0) { try { @@ -380,17 +396,30 @@ class OpenAIAssistant_Agents implements INode { // eslint-disable-next-line no-useless-escape const fileName = cited_file.filename.split(/[\/\\]/).pop() ?? cited_file.filename if (!disableFileDownload) { - filePath = await downloadFile( + if (checkStorage) + await checkStorage(options.orgId, options.subscriptionId, options.usageCacheManager) + + const { path, totalSize } = await downloadFile( openAIApiKey, cited_file, fileName, + options.orgId, options.chatflowid, options.chatId ) + filePath = path fileAnnotations.push({ filePath, fileName }) + + if (updateStorageUsage) + await updateStorageUsage( + options.orgId, + options.workspaceId, + totalSize, + options.usageCacheManager + ) } } else { const file_path = (annotation as OpenAI.Beta.Threads.Messages.FilePathAnnotation).file_path @@ -399,17 +428,30 @@ class OpenAIAssistant_Agents implements INode { // eslint-disable-next-line no-useless-escape const fileName = cited_file.filename.split(/[\/\\]/).pop() ?? 
cited_file.filename if (!disableFileDownload) { - filePath = await downloadFile( + if (checkStorage) + await checkStorage(options.orgId, options.subscriptionId, options.usageCacheManager) + + const { path, totalSize } = await downloadFile( openAIApiKey, cited_file, fileName, + options.orgId, options.chatflowid, options.chatId ) + filePath = path fileAnnotations.push({ filePath, fileName }) + + if (updateStorageUsage) + await updateStorageUsage( + options.orgId, + options.workspaceId, + totalSize, + options.usageCacheManager + ) } } } @@ -467,15 +509,21 @@ class OpenAIAssistant_Agents implements INode { const fileId = chunk.image_file.file_id const fileObj = await openai.files.retrieve(fileId) - const filePath = await downloadImg( + if (checkStorage) await checkStorage(options.orgId, options.subscriptionId, options.usageCacheManager) + + const { filePath, totalSize } = await downloadImg( openai, fileId, `${fileObj.filename}.png`, + options.orgId, options.chatflowid, options.chatId ) artifacts.push({ type: 'png', data: filePath }) + if (updateStorageUsage) + await updateStorageUsage(options.orgId, options.workspaceId, totalSize, options.usageCacheManager) + if (!isStreamingStarted) { isStreamingStarted = true if (sseStreamer) { @@ -530,7 +578,7 @@ class OpenAIAssistant_Agents implements INode { toolOutput }) } catch (e) { - await analyticHandlers.onToolEnd(toolIds, e) + await analyticHandlers.onToolError(toolIds, e) console.error('Error executing tool', e) throw new Error( `Error executing tool. Tool: ${tool.name}. Thread ID: ${threadId}. 
Run ID: ${runThreadId}` @@ -655,7 +703,7 @@ class OpenAIAssistant_Agents implements INode { toolOutput }) } catch (e) { - await analyticHandlers.onToolEnd(toolIds, e) + await analyticHandlers.onToolError(toolIds, e) console.error('Error executing tool', e) clearInterval(timeout) reject( @@ -776,7 +824,21 @@ class OpenAIAssistant_Agents implements INode { // eslint-disable-next-line no-useless-escape const fileName = cited_file.filename.split(/[\/\\]/).pop() ?? cited_file.filename if (!disableFileDownload) { - filePath = await downloadFile(openAIApiKey, cited_file, fileName, options.chatflowid, options.chatId) + if (checkStorage) await checkStorage(options.orgId, options.subscriptionId, options.usageCacheManager) + + const { path, totalSize } = await downloadFile( + openAIApiKey, + cited_file, + fileName, + options.orgId, + options.chatflowid, + options.chatId + ) + filePath = path + + if (updateStorageUsage) + await updateStorageUsage(options.orgId, options.workspaceId, totalSize, options.usageCacheManager) + fileAnnotations.push({ filePath, fileName @@ -789,13 +851,27 @@ class OpenAIAssistant_Agents implements INode { // eslint-disable-next-line no-useless-escape const fileName = cited_file.filename.split(/[\/\\]/).pop() ?? 
cited_file.filename if (!disableFileDownload) { - filePath = await downloadFile( + if (checkStorage) + await checkStorage(options.orgId, options.subscriptionId, options.usageCacheManager) + + const { path, totalSize } = await downloadFile( openAIApiKey, cited_file, fileName, + options.orgId, options.chatflowid, options.chatId ) + filePath = path + + if (updateStorageUsage) + await updateStorageUsage( + options.orgId, + options.workspaceId, + totalSize, + options.usageCacheManager + ) + fileAnnotations.push({ filePath, fileName @@ -822,7 +898,20 @@ class OpenAIAssistant_Agents implements INode { const fileId = content.image_file.file_id const fileObj = await openai.files.retrieve(fileId) - const filePath = await downloadImg(openai, fileId, `${fileObj.filename}.png`, options.chatflowid, options.chatId) + if (checkStorage) await checkStorage(options.orgId, options.subscriptionId, options.usageCacheManager) + + const { filePath, totalSize } = await downloadImg( + openai, + fileId, + `${fileObj.filename}.png`, + options.orgId, + options.chatflowid, + options.chatId + ) + + if (updateStorageUsage) + await updateStorageUsage(options.orgId, options.workspaceId, totalSize, options.usageCacheManager) + artifacts.push({ type: 'png', data: filePath }) } } @@ -847,7 +936,13 @@ class OpenAIAssistant_Agents implements INode { } } -const downloadImg = async (openai: OpenAI, fileId: string, fileName: string, ...paths: string[]) => { +const downloadImg = async ( + openai: OpenAI, + fileId: string, + fileName: string, + orgId: string, + ...paths: string[] +): Promise<{ filePath: string; totalSize: number }> => { const response = await openai.files.content(fileId) // Extract the binary data from the Response object @@ -857,12 +952,18 @@ const downloadImg = async (openai: OpenAI, fileId: string, fileName: string, ... 
const image_data_buffer = Buffer.from(image_data) const mime = 'image/png' - const res = await addSingleFileToStorage(mime, image_data_buffer, fileName, ...paths) + const { path, totalSize } = await addSingleFileToStorage(mime, image_data_buffer, fileName, orgId, ...paths) - return res + return { filePath: path, totalSize } } -const downloadFile = async (openAIApiKey: string, fileObj: any, fileName: string, ...paths: string[]) => { +const downloadFile = async ( + openAIApiKey: string, + fileObj: any, + fileName: string, + orgId: string, + ...paths: string[] +): Promise<{ path: string; totalSize: number }> => { try { const response = await fetch(`https://api.openai.com/v1/files/${fileObj.id}/content`, { method: 'GET', @@ -880,10 +981,12 @@ const downloadFile = async (openAIApiKey: string, fileObj: any, fileName: string const data_buffer = Buffer.from(data) const mime = 'application/octet-stream' - return await addSingleFileToStorage(mime, data_buffer, fileName, ...paths) + const { path, totalSize } = await addSingleFileToStorage(mime, data_buffer, fileName, orgId, ...paths) + + return { path, totalSize } } catch (error) { console.error('Error downloading or writing the file:', error) - return '' + return { path: '', totalSize: 0 } } } @@ -993,7 +1096,7 @@ async function handleToolSubmission(params: ToolSubmissionParams): Promise { const chain = await initChain(nodeData, options) - const loggerHandler = new ConsoleCallbackHandler(options.logger) + const loggerHandler = new ConsoleCallbackHandler(options.logger, options?.orgId) const callbacks = await additionalCallbacks(nodeData, options) const moderations = nodeData.inputs?.inputModeration as Moderation[] const shouldStreamResponse = options.shouldStreamResponse @@ -114,8 +116,9 @@ const initChain = async (nodeData: INodeData, options: ICommonObject) => { } else { if (yamlFileBase64.startsWith('FILE-STORAGE::')) { const file = yamlFileBase64.replace('FILE-STORAGE::', '') + const orgId = options.orgId const 
chatflowid = options.chatflowid - const fileData = await getFileFromStorage(file, chatflowid) + const fileData = await getFileFromStorage(file, orgId, chatflowid) yamlString = fileData.toString() } else { const splitDataURI = yamlFileBase64.split(',') @@ -128,7 +131,7 @@ const initChain = async (nodeData: INodeData, options: ICommonObject) => { return await createOpenAPIChain(yamlString, { llm: model, headers: typeof headers === 'object' ? headers : headers ? JSON.parse(headers) : {}, - verbose: process.env.DEBUG === 'true' + verbose: process.env.DEBUG === 'true' ? true : false }) } diff --git a/packages/components/nodes/chains/ApiChain/POSTApiChain.ts b/packages/components/nodes/chains/ApiChain/POSTApiChain.ts index da033d2d2..b9a489c4d 100644 --- a/packages/components/nodes/chains/ApiChain/POSTApiChain.ts +++ b/packages/components/nodes/chains/ApiChain/POSTApiChain.ts @@ -15,6 +15,7 @@ class POSTApiChain_Chains implements INode { baseClasses: string[] description: string inputs: INodeParams[] + badge: string constructor() { this.label = 'POST API Chain' @@ -23,6 +24,7 @@ class POSTApiChain_Chains implements INode { this.type = 'POSTApiChain' this.icon = 'post.svg' this.category = 'Chains' + this.badge = 'DEPRECATING' this.description = 'Chain to run queries against POST API' this.baseClasses = [this.type, ...getBaseClasses(APIChain)] this.inputs = [ @@ -87,7 +89,7 @@ class POSTApiChain_Chains implements INode { const ansPrompt = nodeData.inputs?.ansPrompt as string const chain = await getAPIChain(apiDocs, model, headers, urlPrompt, ansPrompt) - const loggerHandler = new ConsoleCallbackHandler(options.logger) + const loggerHandler = new ConsoleCallbackHandler(options.logger, options?.orgId) const callbacks = await additionalCallbacks(nodeData, options) const shouldStreamResponse = options.shouldStreamResponse @@ -119,7 +121,7 @@ const getAPIChain = async (documents: string, llm: BaseLanguageModel, headers: s const chain = APIChain.fromLLMAndAPIDocs(llm, documents, 
{ apiUrlPrompt, apiResponsePrompt, - verbose: process.env.DEBUG === 'true', + verbose: process.env.DEBUG === 'true' ? true : false, headers: typeof headers === 'object' ? headers : headers ? JSON.parse(headers) : {} }) return chain diff --git a/packages/components/nodes/chains/ConversationChain/ConversationChain.ts b/packages/components/nodes/chains/ConversationChain/ConversationChain.ts index f0d3de7aa..04e36daf3 100644 --- a/packages/components/nodes/chains/ConversationChain/ConversationChain.ts +++ b/packages/components/nodes/chains/ConversationChain/ConversationChain.ts @@ -132,7 +132,7 @@ class ConversationChain_Chains implements INode { } } - const loggerHandler = new ConsoleCallbackHandler(options.logger) + const loggerHandler = new ConsoleCallbackHandler(options.logger, options?.orgId) const additionalCallback = await additionalCallbacks(nodeData, options) let res = '' diff --git a/packages/components/nodes/chains/ConversationalRetrievalQAChain/ConversationalRetrievalQAChain.ts b/packages/components/nodes/chains/ConversationalRetrievalQAChain/ConversationalRetrievalQAChain.ts index 29528ae5c..31dfa8b1a 100644 --- a/packages/components/nodes/chains/ConversationalRetrievalQAChain/ConversationalRetrievalQAChain.ts +++ b/packages/components/nodes/chains/ConversationalRetrievalQAChain/ConversationalRetrievalQAChain.ts @@ -185,6 +185,7 @@ class ConversationalRetrievalQAChain_Chains implements INode { const shouldStreamResponse = options.shouldStreamResponse const sseStreamer: IServerSideEventStreamer = options.sseStreamer as IServerSideEventStreamer const chatId = options.chatId + const orgId = options.orgId let customResponsePrompt = responsePrompt // If the deprecated systemMessagePrompt is still exists @@ -200,7 +201,8 @@ class ConversationalRetrievalQAChain_Chains implements INode { memoryKey: 'chat_history', appDataSource, databaseEntities, - chatflowid + chatflowid, + orgId }) } @@ -220,7 +222,7 @@ class ConversationalRetrievalQAChain_Chains implements 
INode { const history = ((await memory.getChatMessages(this.sessionId, false, prependMessages)) as IMessage[]) ?? [] - const loggerHandler = new ConsoleCallbackHandler(options.logger) + const loggerHandler = new ConsoleCallbackHandler(options.logger, options?.orgId) const additionalCallback = await additionalCallbacks(nodeData, options) let callbacks = [loggerHandler, ...additionalCallback] @@ -407,18 +409,21 @@ interface BufferMemoryExtendedInput { appDataSource: DataSource databaseEntities: IDatabaseEntity chatflowid: string + orgId: string } class BufferMemory extends FlowiseMemory implements MemoryMethods { appDataSource: DataSource databaseEntities: IDatabaseEntity chatflowid: string + orgId: string constructor(fields: BufferMemoryInput & BufferMemoryExtendedInput) { super(fields) this.appDataSource = fields.appDataSource this.databaseEntities = fields.databaseEntities this.chatflowid = fields.chatflowid + this.orgId = fields.orgId } async getChatMessages( @@ -443,7 +448,7 @@ class BufferMemory extends FlowiseMemory implements MemoryMethods { } if (returnBaseMessages) { - return await mapChatMessageToBaseMessage(chatMessage) + return await mapChatMessageToBaseMessage(chatMessage, this.orgId) } let returnIMessages: IMessage[] = [] diff --git a/packages/components/nodes/chains/GraphCypherQAChain/GraphCypherQAChain.ts b/packages/components/nodes/chains/GraphCypherQAChain/GraphCypherQAChain.ts index fb7dc4a7d..5a2f16c09 100644 --- a/packages/components/nodes/chains/GraphCypherQAChain/GraphCypherQAChain.ts +++ b/packages/components/nodes/chains/GraphCypherQAChain/GraphCypherQAChain.ts @@ -215,7 +215,7 @@ class GraphCypherQA_Chain implements INode { query: input } - const loggerHandler = new ConsoleCallbackHandler(options.logger) + const loggerHandler = new ConsoleCallbackHandler(options.logger, options?.orgId) const callbackHandlers = await additionalCallbacks(nodeData, options) let callbacks = [loggerHandler, ...callbackHandlers] diff --git 
a/packages/components/nodes/chains/LLMChain/LLMChain.ts b/packages/components/nodes/chains/LLMChain/LLMChain.ts index f72603635..801358126 100644 --- a/packages/components/nodes/chains/LLMChain/LLMChain.ts +++ b/packages/components/nodes/chains/LLMChain/LLMChain.ts @@ -167,7 +167,7 @@ const runPrediction = async ( nodeData: INodeData, disableStreaming?: boolean ) => { - const loggerHandler = new ConsoleCallbackHandler(options.logger) + const loggerHandler = new ConsoleCallbackHandler(options.logger, options?.orgId) const callbacks = await additionalCallbacks(nodeData, options) const moderations = nodeData.inputs?.inputModeration as Moderation[] diff --git a/packages/components/nodes/chains/MultiPromptChain/MultiPromptChain.ts b/packages/components/nodes/chains/MultiPromptChain/MultiPromptChain.ts index 7863981c2..da6834005 100644 --- a/packages/components/nodes/chains/MultiPromptChain/MultiPromptChain.ts +++ b/packages/components/nodes/chains/MultiPromptChain/MultiPromptChain.ts @@ -16,11 +16,13 @@ class MultiPromptChain_Chains implements INode { baseClasses: string[] description: string inputs: INodeParams[] + badge: string constructor() { this.label = 'Multi Prompt Chain' this.name = 'multiPromptChain' this.version = 2.0 + this.badge = 'DEPRECATING' this.type = 'MultiPromptChain' this.icon = 'prompt.svg' this.category = 'Chains' @@ -66,7 +68,7 @@ class MultiPromptChain_Chains implements INode { promptNames, promptDescriptions, promptTemplates, - llmChainOpts: { verbose: process.env.DEBUG === 'true' } + llmChainOpts: { verbose: process.env.DEBUG === 'true' ? 
true : false } }) return chain @@ -95,7 +97,7 @@ class MultiPromptChain_Chains implements INode { } const obj = { input } - const loggerHandler = new ConsoleCallbackHandler(options.logger) + const loggerHandler = new ConsoleCallbackHandler(options.logger, options?.orgId) const callbacks = await additionalCallbacks(nodeData, options) if (shouldStreamResponse) { diff --git a/packages/components/nodes/chains/MultiRetrievalQAChain/MultiRetrievalQAChain.ts b/packages/components/nodes/chains/MultiRetrievalQAChain/MultiRetrievalQAChain.ts index eed73f4cc..bdcd37621 100644 --- a/packages/components/nodes/chains/MultiRetrievalQAChain/MultiRetrievalQAChain.ts +++ b/packages/components/nodes/chains/MultiRetrievalQAChain/MultiRetrievalQAChain.ts @@ -15,12 +15,14 @@ class MultiRetrievalQAChain_Chains implements INode { category: string baseClasses: string[] description: string + badge: string inputs: INodeParams[] constructor() { this.label = 'Multi Retrieval QA Chain' this.name = 'multiRetrievalQAChain' this.version = 2.0 + this.badge = 'DEPRECATING' this.type = 'MultiRetrievalQAChain' this.icon = 'qa.svg' this.category = 'Chains' @@ -74,7 +76,7 @@ class MultiRetrievalQAChain_Chains implements INode { retrieverNames, retrieverDescriptions, retrievers, - retrievalQAChainOpts: { verbose: process.env.DEBUG === 'true', returnSourceDocuments } + retrievalQAChainOpts: { verbose: process.env.DEBUG === 'true' ? 
true : false, returnSourceDocuments } }) return chain } @@ -101,7 +103,7 @@ class MultiRetrievalQAChain_Chains implements INode { } } const obj = { input } - const loggerHandler = new ConsoleCallbackHandler(options.logger) + const loggerHandler = new ConsoleCallbackHandler(options.logger, options?.orgId) const callbacks = await additionalCallbacks(nodeData, options) if (shouldStreamResponse) { diff --git a/packages/components/nodes/chains/RetrievalQAChain/RetrievalQAChain.ts b/packages/components/nodes/chains/RetrievalQAChain/RetrievalQAChain.ts index 8e7453d75..f82d92e06 100644 --- a/packages/components/nodes/chains/RetrievalQAChain/RetrievalQAChain.ts +++ b/packages/components/nodes/chains/RetrievalQAChain/RetrievalQAChain.ts @@ -17,6 +17,7 @@ class RetrievalQAChain_Chains implements INode { baseClasses: string[] description: string inputs: INodeParams[] + badge: string constructor() { this.label = 'Retrieval QA Chain' @@ -24,6 +25,7 @@ class RetrievalQAChain_Chains implements INode { this.version = 2.0 this.type = 'RetrievalQAChain' this.icon = 'qa.svg' + this.badge = 'DEPRECATING' this.category = 'Chains' this.description = 'QA chain to answer a question based on the retrieved documents' this.baseClasses = [this.type, ...getBaseClasses(RetrievalQAChain)] @@ -53,7 +55,7 @@ class RetrievalQAChain_Chains implements INode { const model = nodeData.inputs?.model as BaseLanguageModel const vectorStoreRetriever = nodeData.inputs?.vectorStoreRetriever as BaseRetriever - const chain = RetrievalQAChain.fromLLM(model, vectorStoreRetriever, { verbose: process.env.DEBUG === 'true' }) + const chain = RetrievalQAChain.fromLLM(model, vectorStoreRetriever, { verbose: process.env.DEBUG === 'true' ? 
true : false }) return chain } @@ -80,7 +82,7 @@ class RetrievalQAChain_Chains implements INode { const obj = { query: input } - const loggerHandler = new ConsoleCallbackHandler(options.logger) + const loggerHandler = new ConsoleCallbackHandler(options.logger, options?.orgId) const callbacks = await additionalCallbacks(nodeData, options) if (shouldStreamResponse) { diff --git a/packages/components/nodes/chains/SqlDatabaseChain/SqlDatabaseChain.ts b/packages/components/nodes/chains/SqlDatabaseChain/SqlDatabaseChain.ts index cc062fb76..539e2031d 100644 --- a/packages/components/nodes/chains/SqlDatabaseChain/SqlDatabaseChain.ts +++ b/packages/components/nodes/chains/SqlDatabaseChain/SqlDatabaseChain.ts @@ -194,7 +194,7 @@ class SqlDatabaseChain_Chains implements INode { topK, customPrompt ) - const loggerHandler = new ConsoleCallbackHandler(options.logger) + const loggerHandler = new ConsoleCallbackHandler(options.logger, options?.orgId) const callbacks = await additionalCallbacks(nodeData, options) if (shouldStreamResponse) { @@ -241,7 +241,7 @@ const getSQLDBChain = async ( const obj: SqlDatabaseChainInput = { llm, database: db, - verbose: process.env.DEBUG === 'true', + verbose: process.env.DEBUG === 'true' ? 
true : false, topK: topK } diff --git a/packages/components/nodes/chains/VectorDBQAChain/VectorDBQAChain.ts b/packages/components/nodes/chains/VectorDBQAChain/VectorDBQAChain.ts index ec1b2cf8b..f111f6529 100644 --- a/packages/components/nodes/chains/VectorDBQAChain/VectorDBQAChain.ts +++ b/packages/components/nodes/chains/VectorDBQAChain/VectorDBQAChain.ts @@ -17,6 +17,7 @@ class VectorDBQAChain_Chains implements INode { baseClasses: string[] description: string inputs: INodeParams[] + badge: string constructor() { this.label = 'VectorDB QA Chain' @@ -25,6 +26,7 @@ class VectorDBQAChain_Chains implements INode { this.type = 'VectorDBQAChain' this.icon = 'vectordb.svg' this.category = 'Chains' + this.badge = 'DEPRECATING' this.description = 'QA chain for vector databases' this.baseClasses = [this.type, ...getBaseClasses(VectorDBQAChain)] this.inputs = [ @@ -55,7 +57,7 @@ class VectorDBQAChain_Chains implements INode { const chain = VectorDBQAChain.fromLLM(model, vectorStore, { k: (vectorStore as any)?.k ?? 4, - verbose: process.env.DEBUG === 'true' + verbose: process.env.DEBUG === 'true' ? 
true : false }) return chain } @@ -84,7 +86,7 @@ class VectorDBQAChain_Chains implements INode { query: input } - const loggerHandler = new ConsoleCallbackHandler(options.logger) + const loggerHandler = new ConsoleCallbackHandler(options.logger, options?.orgId) const callbacks = await additionalCallbacks(nodeData, options) if (shouldStreamResponse) { diff --git a/packages/components/nodes/chatmodels/AWSBedrock/AWSChatBedrock.ts b/packages/components/nodes/chatmodels/AWSBedrock/AWSChatBedrock.ts index b48bc7f0d..915b2412b 100644 --- a/packages/components/nodes/chatmodels/AWSBedrock/AWSChatBedrock.ts +++ b/packages/components/nodes/chatmodels/AWSBedrock/AWSChatBedrock.ts @@ -23,7 +23,7 @@ class AWSChatBedrock_ChatModels implements INode { constructor() { this.label = 'AWS ChatBedrock' this.name = 'awsChatBedrock' - this.version = 6.0 + this.version = 6.1 this.type = 'AWSChatBedrock' this.icon = 'aws.svg' this.category = 'Chat Models' @@ -100,6 +100,16 @@ class AWSChatBedrock_ChatModels implements INode { 'Allow image input. Refer to the docs for more details.', default: false, optional: true + }, + { + label: 'Latency Optimized', + name: 'latencyOptimized', + type: 'boolean', + description: + 'Enable latency optimized configuration for supported models. Refer to the supported latecny optimized models for more details.', + default: false, + optional: true, + additionalParams: true } ] } @@ -122,6 +132,7 @@ class AWSChatBedrock_ChatModels implements INode { const iMax_tokens_to_sample = nodeData.inputs?.max_tokens_to_sample as string const cache = nodeData.inputs?.cache as BaseCache const streaming = nodeData.inputs?.streaming as boolean + const latencyOptimized = nodeData.inputs?.latencyOptimized as boolean const obj: ChatBedrockConverseInput = { region: iRegion, @@ -131,6 +142,10 @@ class AWSChatBedrock_ChatModels implements INode { streaming: streaming ?? 
true } + if (latencyOptimized) { + obj.performanceConfig = { latency: 'optimized' } + } + /** * Long-term credentials specified in LLM configuration are optional. * Bedrock's credential provider falls back to the AWS SDK to fetch diff --git a/packages/components/nodes/chatmodels/AzureChatOpenAI/AzureChatOpenAI.ts b/packages/components/nodes/chatmodels/AzureChatOpenAI/AzureChatOpenAI.ts index 02834a105..786a17d49 100644 --- a/packages/components/nodes/chatmodels/AzureChatOpenAI/AzureChatOpenAI.ts +++ b/packages/components/nodes/chatmodels/AzureChatOpenAI/AzureChatOpenAI.ts @@ -1,9 +1,10 @@ -import { AzureOpenAIInput, AzureChatOpenAI as LangchainAzureChatOpenAI, ChatOpenAIFields, OpenAIClient } from '@langchain/openai' +import { AzureOpenAIInput, AzureChatOpenAI as LangchainAzureChatOpenAI, ChatOpenAIFields } from '@langchain/openai' import { BaseCache } from '@langchain/core/caches' import { ICommonObject, IMultiModalOption, INode, INodeData, INodeOptionsValue, INodeParams } from '../../../src/Interface' import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils' import { getModels, MODEL_TYPE } from '../../../src/modelLoader' import { AzureChatOpenAI } from './FlowiseAzureChatOpenAI' +import { OpenAI as OpenAIClient } from 'openai' const serverCredentialsExists = !!process.env.AZURE_OPENAI_API_KEY && @@ -26,7 +27,7 @@ class AzureChatOpenAI_ChatModels implements INode { constructor() { this.label = 'Azure ChatOpenAI' this.name = 'azureChatOpenAI' - this.version = 7.0 + this.version = 7.1 this.type = 'AzureChatOpenAI' this.icon = 'Azure.svg' this.category = 'Chat Models' @@ -154,6 +155,15 @@ class AzureChatOpenAI_ChatModels implements INode { optional: false, additionalParams: true }, + { + label: 'Reasoning', + description: 'Whether the model supports reasoning. 
Only applicable for reasoning models.', + name: 'reasoning', + type: 'boolean', + default: false, + optional: true, + additionalParams: true + }, { label: 'Reasoning Effort', description: 'Constrains effort on reasoning for reasoning models. Only applicable for o1 and o3 models.', @@ -173,9 +183,34 @@ class AzureChatOpenAI_ChatModels implements INode { name: 'high' } ], - default: 'medium', - optional: false, - additionalParams: true + additionalParams: true, + show: { + reasoning: true + } + }, + { + label: 'Reasoning Summary', + description: `A summary of the reasoning performed by the model. This can be useful for debugging and understanding the model's reasoning process`, + name: 'reasoningSummary', + type: 'options', + options: [ + { + label: 'Auto', + name: 'auto' + }, + { + label: 'Concise', + name: 'concise' + }, + { + label: 'Detailed', + name: 'detailed' + } + ], + additionalParams: true, + show: { + reasoning: true + } } ] } @@ -199,7 +234,8 @@ class AzureChatOpenAI_ChatModels implements INode { const topP = nodeData.inputs?.topP as string const basePath = nodeData.inputs?.basepath as string const baseOptions = nodeData.inputs?.baseOptions - const reasoningEffort = nodeData.inputs?.reasoningEffort as OpenAIClient.Chat.ChatCompletionReasoningEffort + const reasoningEffort = nodeData.inputs?.reasoningEffort as OpenAIClient.Chat.ChatCompletionReasoningEffort | null + const reasoningSummary = nodeData.inputs?.reasoningSummary as 'auto' | 'concise' | 'detailed' | null const credentialData = await getCredentialData(nodeData.credential ?? 
'', options) const azureOpenAIApiKey = getCredentialParam('azureOpenAIApiKey', credentialData, nodeData) @@ -237,11 +273,22 @@ class AzureChatOpenAI_ChatModels implements INode { console.error('Error parsing base options', exception) } } - if (modelName === 'o3-mini' || modelName.includes('o1')) { + if (modelName.includes('o1') || modelName.includes('o3') || modelName.includes('gpt-5')) { delete obj.temperature - } - if ((modelName.includes('o1') || modelName.includes('o3')) && reasoningEffort) { - obj.reasoningEffort = reasoningEffort + delete obj.stop + const reasoning: OpenAIClient.Reasoning = {} + if (reasoningEffort) { + reasoning.effort = reasoningEffort + } + if (reasoningSummary) { + reasoning.summary = reasoningSummary + } + obj.reasoning = reasoning + + if (maxTokens) { + delete obj.maxTokens + obj.maxCompletionTokens = parseInt(maxTokens, 10) + } } const multiModalOption: IMultiModalOption = { diff --git a/packages/components/nodes/chatmodels/AzureChatOpenAI/FlowiseAzureChatOpenAI.ts b/packages/components/nodes/chatmodels/AzureChatOpenAI/FlowiseAzureChatOpenAI.ts index 7a86a3a37..b28f34f19 100644 --- a/packages/components/nodes/chatmodels/AzureChatOpenAI/FlowiseAzureChatOpenAI.ts +++ b/packages/components/nodes/chatmodels/AzureChatOpenAI/FlowiseAzureChatOpenAI.ts @@ -6,6 +6,7 @@ export class AzureChatOpenAI extends LangchainAzureChatOpenAI implements IVision configuredModel: string configuredMaxToken?: number multiModalOption: IMultiModalOption + builtInTools: Record[] = [] id: string constructor( @@ -27,7 +28,7 @@ export class AzureChatOpenAI extends LangchainAzureChatOpenAI implements IVision } revertToOriginalModel(): void { - this.modelName = this.configuredModel + this.model = this.configuredModel this.maxTokens = this.configuredMaxToken } @@ -38,4 +39,8 @@ export class AzureChatOpenAI extends LangchainAzureChatOpenAI implements IVision setVisionModel(): void { // pass } + + addBuiltInTools(builtInTool: Record): void { + 
this.builtInTools.push(builtInTool) + } } diff --git a/packages/components/nodes/chatmodels/AzureChatOpenAI/README.md b/packages/components/nodes/chatmodels/AzureChatOpenAI/README.md index f12f42dc1..3bfd33964 100644 --- a/packages/components/nodes/chatmodels/AzureChatOpenAI/README.md +++ b/packages/components/nodes/chatmodels/AzureChatOpenAI/README.md @@ -4,13 +4,13 @@ Azure OpenAI Chat Model integration for Flowise ## ๐ŸŒฑ Env Variables -| Variable | Description | Type | Default | -| ---------------------------- | ----------------------------------------------------------------------------------------------- | ------------------------------------------------ | ----------------------------------- | -| AZURE_OPENAI_API_KEY | Default `credential.azureOpenAIApiKey` for Azure OpenAI Model | String | | -| AZURE_OPENAI_API_INSTANCE_NAME | Default `credential.azureOpenAIApiInstanceName` for Azure OpenAI Model | String | | -| AZURE_OPENAI_API_DEPLOYMENT_NAME | Default `credential.azureOpenAIApiDeploymentName` for Azure OpenAI Model | String | | -| AZURE_OPENAI_API_VERSION | Default `credential.azureOpenAIApiVersion` for Azure OpenAI Model | String | | +| Variable | Description | Type | Default | +| -------------------------------- | ------------------------------------------------------------------------ | ------ | ------- | +| AZURE_OPENAI_API_KEY | Default `credential.azureOpenAIApiKey` for Azure OpenAI Model | String | | +| AZURE_OPENAI_API_INSTANCE_NAME | Default `credential.azureOpenAIApiInstanceName` for Azure OpenAI Model | String | | +| AZURE_OPENAI_API_DEPLOYMENT_NAME | Default `credential.azureOpenAIApiDeploymentName` for Azure OpenAI Model | String | | +| AZURE_OPENAI_API_VERSION | Default `credential.azureOpenAIApiVersion` for Azure OpenAI Model | String | | ## License -Source code in this repository is made available under the [Apache License Version 2.0](https://github.com/FlowiseAI/Flowise/blob/master/LICENSE.md). 
\ No newline at end of file +Source code in this repository is made available under the [Apache License Version 2.0](https://github.com/FlowiseAI/Flowise/blob/master/LICENSE.md). diff --git a/packages/components/nodes/chatmodels/ChatAnthropic/ChatAnthropic.ts b/packages/components/nodes/chatmodels/ChatAnthropic/ChatAnthropic.ts index 7204801f9..27f2c7eb4 100644 --- a/packages/components/nodes/chatmodels/ChatAnthropic/ChatAnthropic.ts +++ b/packages/components/nodes/chatmodels/ChatAnthropic/ChatAnthropic.ts @@ -91,7 +91,7 @@ class ChatAnthropic_ChatModels implements INode { label: 'Extended Thinking', name: 'extendedThinking', type: 'boolean', - description: 'Enable extended thinking for reasoning model such as Claude Sonnet 3.7', + description: 'Enable extended thinking for reasoning model such as Claude Sonnet 3.7 and Claude 4', optional: true, additionalParams: true }, diff --git a/packages/components/nodes/chatmodels/ChatCerebras/ChatCerebras.ts b/packages/components/nodes/chatmodels/ChatCerebras/ChatCerebras.ts index 2d65ebeb7..8da49a2cc 100644 --- a/packages/components/nodes/chatmodels/ChatCerebras/ChatCerebras.ts +++ b/packages/components/nodes/chatmodels/ChatCerebras/ChatCerebras.ts @@ -136,7 +136,8 @@ class ChatCerebras_ChatModels implements INode { const obj: ChatOpenAIFields = { temperature: parseFloat(temperature), - modelName, + model: modelName, + apiKey: cerebrasAIApiKey, openAIApiKey: cerebrasAIApiKey, streaming: streaming ?? 
true } diff --git a/packages/components/nodes/chatmodels/ChatCometAPI/ChatCometAPI.ts b/packages/components/nodes/chatmodels/ChatCometAPI/ChatCometAPI.ts new file mode 100644 index 000000000..295c5e7ce --- /dev/null +++ b/packages/components/nodes/chatmodels/ChatCometAPI/ChatCometAPI.ts @@ -0,0 +1,176 @@ +import { BaseCache } from '@langchain/core/caches' +import { ChatOpenAI, ChatOpenAIFields } from '@langchain/openai' +import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface' +import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils' + +class ChatCometAPI_ChatModels implements INode { + readonly baseURL: string = 'https://api.cometapi.com/v1' + label: string + name: string + version: number + type: string + icon: string + category: string + description: string + baseClasses: string[] + credential: INodeParams + inputs: INodeParams[] + + constructor() { + this.label = 'ChatCometAPI' + this.name = 'chatCometAPI' + this.version = 1.0 + this.type = 'ChatCometAPI' + this.icon = 'cometapi.svg' + this.category = 'Chat Models' + this.description = 'Wrapper around CometAPI large language models that use the Chat endpoint' + this.baseClasses = [this.type, ...getBaseClasses(ChatOpenAI)] + this.credential = { + label: 'Connect Credential', + name: 'credential', + type: 'credential', + credentialNames: ['cometApi'] + } + this.inputs = [ + { + label: 'Cache', + name: 'cache', + type: 'BaseCache', + optional: true + }, + { + label: 'Model Name', + name: 'modelName', + type: 'string', + default: 'gpt-5-mini', + description: 'Enter the model name (e.g., gpt-5-mini, claude-sonnet-4-20250514, gemini-2.0-flash)' + }, + { + label: 'Temperature', + name: 'temperature', + type: 'number', + step: 0.1, + default: 0.7, + optional: true + }, + { + label: 'Streaming', + name: 'streaming', + type: 'boolean', + default: true, + optional: true, + additionalParams: true + }, + { + label: 'Max Tokens', + name: 'maxTokens', + type: 
'number', + step: 1, + optional: true, + additionalParams: true + }, + { + label: 'Top Probability', + name: 'topP', + type: 'number', + step: 0.1, + optional: true, + additionalParams: true + }, + { + label: 'Frequency Penalty', + name: 'frequencyPenalty', + type: 'number', + step: 0.1, + optional: true, + additionalParams: true + }, + { + label: 'Presence Penalty', + name: 'presencePenalty', + type: 'number', + step: 0.1, + optional: true, + additionalParams: true + }, + { + label: 'Base Options', + name: 'baseOptions', + type: 'json', + optional: true, + additionalParams: true, + description: 'Additional options to pass to the CometAPI client. This should be a JSON object.' + } + ] + } + + async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { + const temperature = nodeData.inputs?.temperature as string + const modelName = nodeData.inputs?.modelName as string + const maxTokens = nodeData.inputs?.maxTokens as string + const topP = nodeData.inputs?.topP as string + const frequencyPenalty = nodeData.inputs?.frequencyPenalty as string + const presencePenalty = nodeData.inputs?.presencePenalty as string + const streaming = nodeData.inputs?.streaming as boolean + const baseOptions = nodeData.inputs?.baseOptions + + if (nodeData.inputs?.credentialId) { + nodeData.credential = nodeData.inputs?.credentialId + } + const credentialData = await getCredentialData(nodeData.credential ?? '', options) + const openAIApiKey = getCredentialParam('cometApiKey', credentialData, nodeData) + + // Custom error handling for missing API key + if (!openAIApiKey || openAIApiKey.trim() === '') { + throw new Error( + 'CometAPI API Key is missing or empty. Please provide a valid CometAPI API key in the credential configuration.' + ) + } + + // Custom error handling for missing model name + if (!modelName || modelName.trim() === '') { + throw new Error('Model Name is required. 
Please enter a valid model name (e.g., gpt-5-mini, claude-sonnet-4-20250514).') + } + + const cache = nodeData.inputs?.cache as BaseCache + + const obj: ChatOpenAIFields = { + temperature: parseFloat(temperature), + modelName, + openAIApiKey, + apiKey: openAIApiKey, + streaming: streaming ?? true + } + + if (maxTokens) obj.maxTokens = parseInt(maxTokens, 10) + if (topP) obj.topP = parseFloat(topP) + if (frequencyPenalty) obj.frequencyPenalty = parseFloat(frequencyPenalty) + if (presencePenalty) obj.presencePenalty = parseFloat(presencePenalty) + if (cache) obj.cache = cache + + let parsedBaseOptions: any | undefined = undefined + + if (baseOptions) { + try { + parsedBaseOptions = typeof baseOptions === 'object' ? baseOptions : JSON.parse(baseOptions) + if (parsedBaseOptions.baseURL) { + console.warn("The 'baseURL' parameter is not allowed when using the ChatCometAPI node.") + parsedBaseOptions.baseURL = undefined + } + } catch (exception) { + throw new Error('Invalid JSON in the BaseOptions: ' + exception) + } + } + + const model = new ChatOpenAI({ + ...obj, + configuration: { + baseURL: this.baseURL, + ...parsedBaseOptions + } + }) + return model + } +} + +module.exports = { nodeClass: ChatCometAPI_ChatModels } diff --git a/packages/components/nodes/chatmodels/ChatCometAPI/cometapi.svg b/packages/components/nodes/chatmodels/ChatCometAPI/cometapi.svg new file mode 100644 index 000000000..9f1d803d4 --- /dev/null +++ b/packages/components/nodes/chatmodels/ChatCometAPI/cometapi.svg @@ -0,0 +1,7 @@ + + + + + + + diff --git a/packages/components/nodes/chatmodels/ChatFireworks/ChatFireworks.ts b/packages/components/nodes/chatmodels/ChatFireworks/ChatFireworks.ts index 2f8b6abee..b89d1de8c 100644 --- a/packages/components/nodes/chatmodels/ChatFireworks/ChatFireworks.ts +++ b/packages/components/nodes/chatmodels/ChatFireworks/ChatFireworks.ts @@ -1,7 +1,7 @@ import { BaseCache } from '@langchain/core/caches' -import { ChatFireworks } from 
'@langchain/community/chat_models/fireworks' import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface' import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils' +import { ChatFireworks, ChatFireworksParams } from './core' class ChatFireworks_ChatModels implements INode { label: string @@ -41,8 +41,8 @@ class ChatFireworks_ChatModels implements INode { label: 'Model', name: 'modelName', type: 'string', - default: 'accounts/fireworks/models/llama-v2-13b-chat', - placeholder: 'accounts/fireworks/models/llama-v2-13b-chat' + default: 'accounts/fireworks/models/llama-v3p1-8b-instruct', + placeholder: 'accounts/fireworks/models/llama-v3p1-8b-instruct' }, { label: 'Temperature', @@ -71,9 +71,8 @@ class ChatFireworks_ChatModels implements INode { const credentialData = await getCredentialData(nodeData.credential ?? '', options) const fireworksApiKey = getCredentialParam('fireworksApiKey', credentialData, nodeData) - const obj: Partial = { + const obj: ChatFireworksParams = { fireworksApiKey, - model: modelName, modelName, temperature: temperature ? parseFloat(temperature) : undefined, streaming: streaming ?? 
true diff --git a/packages/components/nodes/chatmodels/ChatFireworks/core.ts b/packages/components/nodes/chatmodels/ChatFireworks/core.ts new file mode 100644 index 000000000..a3b3cd111 --- /dev/null +++ b/packages/components/nodes/chatmodels/ChatFireworks/core.ts @@ -0,0 +1,126 @@ +import type { BaseChatModelParams, LangSmithParams } from '@langchain/core/language_models/chat_models' +import { + type OpenAIClient, + type ChatOpenAICallOptions, + type OpenAIChatInput, + type OpenAICoreRequestOptions, + ChatOpenAICompletions +} from '@langchain/openai' + +import { getEnvironmentVariable } from '@langchain/core/utils/env' + +type FireworksUnsupportedArgs = 'frequencyPenalty' | 'presencePenalty' | 'logitBias' | 'functions' + +type FireworksUnsupportedCallOptions = 'functions' | 'function_call' + +export type ChatFireworksCallOptions = Partial> + +export type ChatFireworksParams = Partial> & + BaseChatModelParams & { + /** + * Prefer `apiKey` + */ + fireworksApiKey?: string + /** + * The Fireworks API key to use. + */ + apiKey?: string + } + +export class ChatFireworks extends ChatOpenAICompletions { + static lc_name() { + return 'ChatFireworks' + } + + _llmType() { + return 'fireworks' + } + + get lc_secrets(): { [key: string]: string } | undefined { + return { + fireworksApiKey: 'FIREWORKS_API_KEY', + apiKey: 'FIREWORKS_API_KEY' + } + } + + lc_serializable = true + + fireworksApiKey?: string + + apiKey?: string + + constructor(fields?: ChatFireworksParams) { + const fireworksApiKey = fields?.apiKey || fields?.fireworksApiKey || getEnvironmentVariable('FIREWORKS_API_KEY') + + if (!fireworksApiKey) { + throw new Error( + `Fireworks API key not found. 
Please set the FIREWORKS_API_KEY environment variable or provide the key into "fireworksApiKey"` + ) + } + + super({ + ...fields, + model: fields?.model || fields?.modelName || 'accounts/fireworks/models/llama-v3p1-8b-instruct', + apiKey: fireworksApiKey, + configuration: { + baseURL: 'https://api.fireworks.ai/inference/v1' + }, + streamUsage: false + }) + + this.fireworksApiKey = fireworksApiKey + this.apiKey = fireworksApiKey + } + + getLsParams(options: any): LangSmithParams { + const params = super.getLsParams(options) + params.ls_provider = 'fireworks' + return params + } + + toJSON() { + const result = super.toJSON() + + if ('kwargs' in result && typeof result.kwargs === 'object' && result.kwargs != null) { + delete result.kwargs.openai_api_key + delete result.kwargs.configuration + } + + return result + } + + // eslint-disable-next-line + async completionWithRetry( + request: OpenAIClient.Chat.ChatCompletionCreateParamsStreaming, + options?: OpenAICoreRequestOptions + ): Promise> + + // eslint-disable-next-line + async completionWithRetry( + request: OpenAIClient.Chat.ChatCompletionCreateParamsNonStreaming, + options?: OpenAICoreRequestOptions + ): Promise + + /** + * Calls the Fireworks API with retry logic in case of failures. + * @param request The request to send to the Fireworks API. + * @param options Optional configuration for the API call. + * @returns The response from the Fireworks API. 
+ */ + // eslint-disable-next-line + async completionWithRetry( + request: OpenAIClient.Chat.ChatCompletionCreateParamsStreaming | OpenAIClient.Chat.ChatCompletionCreateParamsNonStreaming, + options?: OpenAICoreRequestOptions + ): Promise | OpenAIClient.Chat.Completions.ChatCompletion> { + delete request.frequency_penalty + delete request.presence_penalty + delete request.logit_bias + delete request.functions + + if (request.stream === true) { + return super.completionWithRetry(request, options) + } + + return super.completionWithRetry(request, options) + } +} diff --git a/packages/components/nodes/chatmodels/ChatGoogleGenerativeAI/ChatGoogleGenerativeAI.ts b/packages/components/nodes/chatmodels/ChatGoogleGenerativeAI/ChatGoogleGenerativeAI.ts index 9d15abba6..d618254c0 100644 --- a/packages/components/nodes/chatmodels/ChatGoogleGenerativeAI/ChatGoogleGenerativeAI.ts +++ b/packages/components/nodes/chatmodels/ChatGoogleGenerativeAI/ChatGoogleGenerativeAI.ts @@ -2,10 +2,9 @@ import { HarmBlockThreshold, HarmCategory } from '@google/generative-ai' import type { SafetySetting } from '@google/generative-ai' import { BaseCache } from '@langchain/core/caches' import { ICommonObject, IMultiModalOption, INode, INodeData, INodeOptionsValue, INodeParams } from '../../../src/Interface' -import { convertMultiOptionsToStringArray, getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils' +import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils' import { getModels, MODEL_TYPE } from '../../../src/modelLoader' import { ChatGoogleGenerativeAI, GoogleGenerativeAIChatInput } from './FlowiseChatGoogleGenerativeAI' -import type FlowiseGoogleAICacheManager from '../../cache/GoogleGenerativeAIContextCache/FlowiseGoogleAICacheManager' class GoogleGenerativeAI_ChatModels implements INode { label: string @@ -22,7 +21,7 @@ class GoogleGenerativeAI_ChatModels implements INode { constructor() { this.label = 'ChatGoogleGenerativeAI' 
this.name = 'chatGoogleGenerativeAI' - this.version = 3.0 + this.version = 3.1 this.type = 'ChatGoogleGenerativeAI' this.icon = 'GoogleGemini.svg' this.category = 'Chat Models' @@ -43,12 +42,6 @@ class GoogleGenerativeAI_ChatModels implements INode { type: 'BaseCache', optional: true }, - { - label: 'Context Cache', - name: 'contextCache', - type: 'GoogleAICacheManager', - optional: true - }, { label: 'Model Name', name: 'modelName', @@ -107,62 +100,91 @@ class GoogleGenerativeAI_ChatModels implements INode { additionalParams: true }, { - label: 'Harm Category', - name: 'harmCategory', - type: 'multiOptions', + label: 'Safety Settings', + name: 'safetySettings', + type: 'array', description: - 'Refer to official guide on how to use Harm Category', - options: [ + 'Safety settings for the model. Refer to the official guide on how to use Safety Settings', + array: [ { - label: 'Dangerous', - name: HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT + label: 'Harm Category', + name: 'harmCategory', + type: 'options', + options: [ + { + label: 'Dangerous', + name: HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT, + description: 'Promotes, facilitates, or encourages harmful acts.' + }, + { + label: 'Harassment', + name: HarmCategory.HARM_CATEGORY_HARASSMENT, + description: 'Negative or harmful comments targeting identity and/or protected attributes.' + }, + { + label: 'Hate Speech', + name: HarmCategory.HARM_CATEGORY_HATE_SPEECH, + description: 'Content that is rude, disrespectful, or profane.' + }, + { + label: 'Sexually Explicit', + name: HarmCategory.HARM_CATEGORY_SEXUALLY_EXPLICIT, + description: 'Contains references to sexual acts or other lewd content.' + }, + { + label: 'Civic Integrity', + name: HarmCategory.HARM_CATEGORY_CIVIC_INTEGRITY, + description: 'Election-related queries.' 
+ } + ] }, { - label: 'Harassment', - name: HarmCategory.HARM_CATEGORY_HARASSMENT - }, - { - label: 'Hate Speech', - name: HarmCategory.HARM_CATEGORY_HATE_SPEECH - }, - { - label: 'Sexually Explicit', - name: HarmCategory.HARM_CATEGORY_SEXUALLY_EXPLICIT + label: 'Harm Block Threshold', + name: 'harmBlockThreshold', + type: 'options', + options: [ + { + label: 'None', + name: HarmBlockThreshold.BLOCK_NONE, + description: 'Always show regardless of probability of unsafe content' + }, + { + label: 'Only High', + name: HarmBlockThreshold.BLOCK_ONLY_HIGH, + description: 'Block when high probability of unsafe content' + }, + { + label: 'Medium and Above', + name: HarmBlockThreshold.BLOCK_MEDIUM_AND_ABOVE, + description: 'Block when medium or high probability of unsafe content' + }, + { + label: 'Low and Above', + name: HarmBlockThreshold.BLOCK_LOW_AND_ABOVE, + description: 'Block when low, medium or high probability of unsafe content' + }, + { + label: 'Threshold Unspecified (Default Threshold)', + name: HarmBlockThreshold.HARM_BLOCK_THRESHOLD_UNSPECIFIED, + description: 'Threshold is unspecified, block using default threshold' + } + ] } ], optional: true, additionalParams: true }, { - label: 'Harm Block Threshold', - name: 'harmBlockThreshold', - type: 'multiOptions', - description: - 'Refer to official guide on how to use Harm Block Threshold', - options: [ - { - label: 'Low and Above', - name: HarmBlockThreshold.BLOCK_LOW_AND_ABOVE - }, - { - label: 'Medium and Above', - name: HarmBlockThreshold.BLOCK_MEDIUM_AND_ABOVE - }, - { - label: 'None', - name: HarmBlockThreshold.BLOCK_NONE - }, - { - label: 'Only High', - name: HarmBlockThreshold.BLOCK_ONLY_HIGH - }, - { - label: 'Threshold Unspecified', - name: HarmBlockThreshold.HARM_BLOCK_THRESHOLD_UNSPECIFIED - } - ], + label: 'Thinking Budget', + name: 'thinkingBudget', + type: 'number', + description: 'Guides the number of thinking tokens. 
-1 for dynamic, 0 to disable, or positive integer (Gemini 2.5 models).', + step: 1, optional: true, - additionalParams: true + additionalParams: true, + show: { + modelName: ['gemini-2.5-pro', 'gemini-2.5-flash', 'gemini-2.5-flash-lite'] + } }, { label: 'Base URL', @@ -201,39 +223,59 @@ class GoogleGenerativeAI_ChatModels implements INode { const maxOutputTokens = nodeData.inputs?.maxOutputTokens as string const topP = nodeData.inputs?.topP as string const topK = nodeData.inputs?.topK as string - const harmCategory = nodeData.inputs?.harmCategory as string - const harmBlockThreshold = nodeData.inputs?.harmBlockThreshold as string + const _safetySettings = nodeData.inputs?.safetySettings as string + const cache = nodeData.inputs?.cache as BaseCache - const contextCache = nodeData.inputs?.contextCache as FlowiseGoogleAICacheManager const streaming = nodeData.inputs?.streaming as boolean const baseUrl = nodeData.inputs?.baseUrl as string | undefined + const thinkingBudget = nodeData.inputs?.thinkingBudget as string const allowImageUploads = nodeData.inputs?.allowImageUploads as boolean - const obj: Partial = { + const obj: GoogleGenerativeAIChatInput = { apiKey: apiKey, - modelName: customModelName || modelName, + model: customModelName || modelName, streaming: streaming ?? true } + // this extra metadata is needed, as langchain does not show the model name in the callbacks. 
+ obj.metadata = { + fw_model_name: customModelName || modelName + } if (maxOutputTokens) obj.maxOutputTokens = parseInt(maxOutputTokens, 10) if (topP) obj.topP = parseFloat(topP) if (topK) obj.topK = parseFloat(topK) if (cache) obj.cache = cache if (temperature) obj.temperature = parseFloat(temperature) if (baseUrl) obj.baseUrl = baseUrl + if (thinkingBudget) obj.thinkingBudget = parseInt(thinkingBudget, 10) - // Safety Settings - let harmCategories: string[] = convertMultiOptionsToStringArray(harmCategory) - let harmBlockThresholds: string[] = convertMultiOptionsToStringArray(harmBlockThreshold) - if (harmCategories.length != harmBlockThresholds.length) - throw new Error(`Harm Category & Harm Block Threshold are not the same length`) - const safetySettings: SafetySetting[] = harmCategories.map((harmCategory, index) => { - return { - category: harmCategory as HarmCategory, - threshold: harmBlockThresholds[index] as HarmBlockThreshold + let safetySettings: SafetySetting[] = [] + if (_safetySettings) { + try { + const parsedSafetySettings = typeof _safetySettings === 'string' ? 
JSON.parse(_safetySettings) : _safetySettings + if (Array.isArray(parsedSafetySettings)) { + const validSettings = parsedSafetySettings + .filter((setting: any) => setting.harmCategory && setting.harmBlockThreshold) + .map((setting: any) => ({ + category: setting.harmCategory as HarmCategory, + threshold: setting.harmBlockThreshold as HarmBlockThreshold + })) + + // Remove duplicates by keeping only the first occurrence of each harm category + const seenCategories = new Set() + safetySettings = validSettings.filter((setting) => { + if (seenCategories.has(setting.category)) { + return false + } + seenCategories.add(setting.category) + return true + }) + } + } catch (error) { + console.warn('Failed to parse safety settings:', error) } - }) + } if (safetySettings.length > 0) obj.safetySettings = safetySettings const multiModalOption: IMultiModalOption = { @@ -244,7 +286,6 @@ class GoogleGenerativeAI_ChatModels implements INode { const model = new ChatGoogleGenerativeAI(nodeData.id, obj) model.setMultiModalOption(multiModalOption) - if (contextCache) model.setContextCache(contextCache) return model } diff --git a/packages/components/nodes/chatmodels/ChatGoogleGenerativeAI/FlowiseChatGoogleGenerativeAI.ts b/packages/components/nodes/chatmodels/ChatGoogleGenerativeAI/FlowiseChatGoogleGenerativeAI.ts index 4824810eb..cdf3ac118 100644 --- a/packages/components/nodes/chatmodels/ChatGoogleGenerativeAI/FlowiseChatGoogleGenerativeAI.ts +++ b/packages/components/nodes/chatmodels/ChatGoogleGenerativeAI/FlowiseChatGoogleGenerativeAI.ts @@ -1,34 +1,42 @@ -import { BaseMessage, AIMessage, AIMessageChunk, isBaseMessage, ChatMessage, MessageContentComplex } from '@langchain/core/messages' -import { CallbackManagerForLLMRun } from '@langchain/core/callbacks/manager' -import { BaseChatModel, type BaseChatModelParams } from '@langchain/core/language_models/chat_models' -import { ChatGeneration, ChatGenerationChunk, ChatResult } from '@langchain/core/outputs' -import { ToolCallChunk } 
from '@langchain/core/messages/tool' -import { NewTokenIndices } from '@langchain/core/callbacks/base' import { - EnhancedGenerateContentResponse, - Content, - Part, - Tool, GenerativeModel, - GoogleGenerativeAI as GenerativeAI -} from '@google/generative-ai' -import type { - FunctionCallPart, - FunctionResponsePart, - SafetySetting, - UsageMetadata, + GoogleGenerativeAI as GenerativeAI, FunctionDeclarationsTool as GoogleGenerativeAIFunctionDeclarationsTool, - GenerateContentRequest + FunctionDeclaration as GenerativeAIFunctionDeclaration, + type FunctionDeclarationSchema as GenerativeAIFunctionDeclarationSchema, + GenerateContentRequest, + SafetySetting, + Part as GenerativeAIPart, + ModelParams, + RequestOptions, + type CachedContent, + Schema } from '@google/generative-ai' -import { ICommonObject, IMultiModalOption, IVisionChatModal } from '../../../src' -import { StructuredToolInterface } from '@langchain/core/tools' -import { isStructuredTool } from '@langchain/core/utils/function_calling' -import { zodToJsonSchema } from 'zod-to-json-schema' -import { BaseLanguageModelCallOptions } from '@langchain/core/language_models/base' -import type FlowiseGoogleAICacheManager from '../../cache/GoogleGenerativeAIContextCache/FlowiseGoogleAICacheManager' - -const DEFAULT_IMAGE_MAX_TOKEN = 8192 -const DEFAULT_IMAGE_MODEL = 'gemini-1.5-flash-latest' +import { CallbackManagerForLLMRun } from '@langchain/core/callbacks/manager' +import { AIMessageChunk, BaseMessage, UsageMetadata } from '@langchain/core/messages' +import { ChatGenerationChunk, ChatResult } from '@langchain/core/outputs' +import { getEnvironmentVariable } from '@langchain/core/utils/env' +import { + BaseChatModel, + type BaseChatModelCallOptions, + type LangSmithParams, + type BaseChatModelParams +} from '@langchain/core/language_models/chat_models' +import { NewTokenIndices } from '@langchain/core/callbacks/base' +import { BaseLanguageModelInput, StructuredOutputMethodOptions } from 
'@langchain/core/language_models/base' +import { Runnable, RunnablePassthrough, RunnableSequence } from '@langchain/core/runnables' +import { InferInteropZodOutput, InteropZodType, isInteropZodSchema } from '@langchain/core/utils/types' +import { BaseLLMOutputParser, JsonOutputParser } from '@langchain/core/output_parsers' +import { schemaToGenerativeAIParameters, removeAdditionalProperties } from './utils/zod_to_genai_parameters.js' +import { + convertBaseMessagesToContent, + convertResponseContentToChatGenerationChunk, + mapGenerateContentResultToChatResult +} from './utils/common.js' +import { GoogleGenerativeAIToolsOutputParser } from './utils/output_parsers.js' +import { GoogleGenerativeAIToolType } from './utils/types.js' +import { convertToolsToGenAI } from './utils/tools.js' +import { IMultiModalOption, IVisionChatModal } from '../../../src' interface TokenUsage { completionTokens?: number @@ -36,44 +44,549 @@ interface TokenUsage { totalTokens?: number } -interface GoogleGenerativeAIChatCallOptions extends BaseLanguageModelCallOptions { - tools?: StructuredToolInterface[] | GoogleGenerativeAIFunctionDeclarationsTool[] +export type BaseMessageExamplePair = { + input: BaseMessage + output: BaseMessage +} + +export interface GoogleGenerativeAIChatCallOptions extends BaseChatModelCallOptions { + tools?: GoogleGenerativeAIToolType[] + /** + * Allowed functions to call when the mode is "any". + * If empty, any one of the provided functions are called. + */ + allowedFunctionNames?: string[] /** * Whether or not to include usage data, like token counts * in the streamed response chunks. * @default true */ streamUsage?: boolean + + /** + * JSON schema to be returned by the model. + */ + responseSchema?: Schema } +/** + * An interface defining the input to the ChatGoogleGenerativeAI class. 
+ */ export interface GoogleGenerativeAIChatInput extends BaseChatModelParams, Pick { - modelName?: string - model?: string + /** + * Model Name to use + * + * Note: The format must follow the pattern - `{model}` + */ + model: string + + /** + * Controls the randomness of the output. + * + * Values can range from [0.0,2.0], inclusive. A value closer to 2.0 + * will produce responses that are more varied and creative, while + * a value closer to 0.0 will typically result in less surprising + * responses from the model. + * + * Note: The default value varies by model + */ temperature?: number + + /** + * Maximum number of tokens to generate in the completion. + */ maxOutputTokens?: number + + /** + * Top-p changes how the model selects tokens for output. + * + * Tokens are selected from most probable to least until the sum + * of their probabilities equals the top-p value. + * + * For example, if tokens A, B, and C have a probability of + * .3, .2, and .1 and the top-p value is .5, then the model will + * select either A or B as the next token (using temperature). + * + * Note: The default value varies by model + */ topP?: number + + /** + * Top-k changes how the model selects tokens for output. + * + * A top-k of 1 means the selected token is the most probable among + * all tokens in the model's vocabulary (also called greedy decoding), + * while a top-k of 3 means that the next token is selected from + * among the 3 most probable tokens (using temperature). + * + * Note: The default value varies by model + */ topK?: number + + /** + * The set of character sequences (up to 5) that will stop output generation. + * If specified, the API will stop at the first appearance of a stop + * sequence. + * + * Note: The stop sequence will not be included as part of the response. + * Note: stopSequences is only supported for Gemini models + */ stopSequences?: string[] + + /** + * A list of unique `SafetySetting` instances for blocking unsafe content. 
The API will block + * any prompts and responses that fail to meet the thresholds set by these settings. If there + * is no `SafetySetting` for a given `SafetyCategory` provided in the list, the API will use + * the default safety setting for that category. + */ safetySettings?: SafetySetting[] + + /** + * Google API key to use + */ apiKey?: string + + /** + * Google API version to use + */ apiVersion?: string + + /** + * Google API base URL to use + */ baseUrl?: string + + /** Whether to stream the results or not */ streaming?: boolean + + /** + * Whether or not to force the model to respond with JSON. + * Available for `gemini-1.5` models and later. + * @default false + */ + json?: boolean + + /** + * Whether or not model supports system instructions. + * The following models support system instructions: + * - All Gemini 1.5 Pro model versions + * - All Gemini 1.5 Flash model versions + * - Gemini 1.0 Pro version gemini-1.0-pro-002 + */ + convertSystemMessageToHumanContent?: boolean | undefined + + /** Thinking budget for Gemini 2.5 thinking models. Supports -1 (dynamic), 0 (off), or positive integers. */ + thinkingBudget?: number } -class LangchainChatGoogleGenerativeAI +/** + * Google Generative AI chat model integration. + * + * Setup: + * Install `@langchain/google-genai` and set an environment variable named `GOOGLE_API_KEY`. + * + * ```bash + * npm install @langchain/google-genai + * export GOOGLE_API_KEY="your-api-key" + * ``` + * + * ## [Constructor args](https://api.js.langchain.com/classes/langchain_google_genai.ChatGoogleGenerativeAI.html#constructor) + * + * ## [Runtime args](https://api.js.langchain.com/interfaces/langchain_google_genai.GoogleGenerativeAIChatCallOptions.html) + * + * Runtime args can be passed as the second argument to any of the base runnable methods `.invoke`. `.stream`, `.batch`, etc. 
+ * They can also be passed via `.withConfig`, or the second arg in `.bindTools`, like shown in the examples below: + * + * ```typescript + * // When calling `.withConfig`, call options should be passed via the first argument + * const llmWithArgsBound = llm.withConfig({ + * stop: ["\n"], + * }); + * + * // When calling `.bindTools`, call options should be passed via the second argument + * const llmWithTools = llm.bindTools( + * [...], + * { + * stop: ["\n"], + * } + * ); + * ``` + * + * ## Examples + * + *
+ * Instantiate + * + * ```typescript + * import { ChatGoogleGenerativeAI } from '@langchain/google-genai'; + * + * const llm = new ChatGoogleGenerativeAI({ + * model: "gemini-1.5-flash", + * temperature: 0, + * maxRetries: 2, + * // apiKey: "...", + * // other params... + * }); + * ``` + *
+ * + *
+ * + *
+ * Invoking + * + * ```typescript + * const input = `Translate "I love programming" into French.`; + * + * // Models also accept a list of chat messages or a formatted prompt + * const result = await llm.invoke(input); + * console.log(result); + * ``` + * + * ```txt + * AIMessage { + * "content": "There are a few ways to translate \"I love programming\" into French, depending on the level of formality and nuance you want to convey:\n\n**Formal:**\n\n* **J'aime la programmation.** (This is the most literal and formal translation.)\n\n**Informal:**\n\n* **J'adore programmer.** (This is a more enthusiastic and informal translation.)\n* **J'aime beaucoup programmer.** (This is a slightly less enthusiastic but still informal translation.)\n\n**More specific:**\n\n* **J'aime beaucoup coder.** (This specifically refers to writing code.)\n* **J'aime beaucoup dรฉvelopper des logiciels.** (This specifically refers to developing software.)\n\nThe best translation will depend on the context and your intended audience. \n", + * "response_metadata": { + * "finishReason": "STOP", + * "index": 0, + * "safetyRatings": [ + * { + * "category": "HARM_CATEGORY_SEXUALLY_EXPLICIT", + * "probability": "NEGLIGIBLE" + * }, + * { + * "category": "HARM_CATEGORY_HATE_SPEECH", + * "probability": "NEGLIGIBLE" + * }, + * { + * "category": "HARM_CATEGORY_HARASSMENT", + * "probability": "NEGLIGIBLE" + * }, + * { + * "category": "HARM_CATEGORY_DANGEROUS_CONTENT", + * "probability": "NEGLIGIBLE" + * } + * ] + * }, + * "usage_metadata": { + * "input_tokens": 10, + * "output_tokens": 149, + * "total_tokens": 159 + * } + * } + * ``` + *
+ * + *
+ * + *
+ * Streaming Chunks + * + * ```typescript + * for await (const chunk of await llm.stream(input)) { + * console.log(chunk); + * } + * ``` + * + * ```txt + * AIMessageChunk { + * "content": "There", + * "response_metadata": { + * "index": 0 + * } + * "usage_metadata": { + * "input_tokens": 10, + * "output_tokens": 1, + * "total_tokens": 11 + * } + * } + * AIMessageChunk { + * "content": " are a few ways to translate \"I love programming\" into French, depending on", + * } + * AIMessageChunk { + * "content": " the level of formality and nuance you want to convey:\n\n**Formal:**\n\n", + * } + * AIMessageChunk { + * "content": "* **J'aime la programmation.** (This is the most literal and formal translation.)\n\n**Informal:**\n\n* **J'adore programmer.** (This", + * } + * AIMessageChunk { + * "content": " is a more enthusiastic and informal translation.)\n* **J'aime beaucoup programmer.** (This is a slightly less enthusiastic but still informal translation.)\n\n**More", + * } + * AIMessageChunk { + * "content": " specific:**\n\n* **J'aime beaucoup coder.** (This specifically refers to writing code.)\n* **J'aime beaucoup dรฉvelopper des logiciels.** (This specifically refers to developing software.)\n\nThe best translation will depend on the context and", + * } + * AIMessageChunk { + * "content": " your intended audience. \n", + * } + * ``` + *
+ * + *
+ * + *
+ * Aggregate Streamed Chunks + * + * ```typescript + * import { AIMessageChunk } from '@langchain/core/messages'; + * import { concat } from '@langchain/core/utils/stream'; + * + * const stream = await llm.stream(input); + * let full: AIMessageChunk | undefined; + * for await (const chunk of stream) { + * full = !full ? chunk : concat(full, chunk); + * } + * console.log(full); + * ``` + * + * ```txt + * AIMessageChunk { + * "content": "There are a few ways to translate \"I love programming\" into French, depending on the level of formality and nuance you want to convey:\n\n**Formal:**\n\n* **J'aime la programmation.** (This is the most literal and formal translation.)\n\n**Informal:**\n\n* **J'adore programmer.** (This is a more enthusiastic and informal translation.)\n* **J'aime beaucoup programmer.** (This is a slightly less enthusiastic but still informal translation.)\n\n**More specific:**\n\n* **J'aime beaucoup coder.** (This specifically refers to writing code.)\n* **J'aime beaucoup dรฉvelopper des logiciels.** (This specifically refers to developing software.)\n\nThe best translation will depend on the context and your intended audience. \n", + * "usage_metadata": { + * "input_tokens": 10, + * "output_tokens": 277, + * "total_tokens": 287 + * } + * } + * ``` + *
+ * + *
+ * + *
+ * Bind tools + * + * ```typescript + * import { z } from 'zod'; + * + * const GetWeather = { + * name: "GetWeather", + * description: "Get the current weather in a given location", + * schema: z.object({ + * location: z.string().describe("The city and state, e.g. San Francisco, CA") + * }), + * } + * + * const GetPopulation = { + * name: "GetPopulation", + * description: "Get the current population in a given location", + * schema: z.object({ + * location: z.string().describe("The city and state, e.g. San Francisco, CA") + * }), + * } + * + * const llmWithTools = llm.bindTools([GetWeather, GetPopulation]); + * const aiMsg = await llmWithTools.invoke( + * "Which city is hotter today and which is bigger: LA or NY?" + * ); + * console.log(aiMsg.tool_calls); + * ``` + * + * ```txt + * [ + * { + * name: 'GetWeather', + * args: { location: 'Los Angeles, CA' }, + * type: 'tool_call' + * }, + * { + * name: 'GetWeather', + * args: { location: 'New York, NY' }, + * type: 'tool_call' + * }, + * { + * name: 'GetPopulation', + * args: { location: 'Los Angeles, CA' }, + * type: 'tool_call' + * }, + * { + * name: 'GetPopulation', + * args: { location: 'New York, NY' }, + * type: 'tool_call' + * } + * ] + * ``` + *
+ * + *
+ * + *
+ * Structured Output + * + * ```typescript + * const Joke = z.object({ + * setup: z.string().describe("The setup of the joke"), + * punchline: z.string().describe("The punchline to the joke"), + * rating: z.number().optional().describe("How funny the joke is, from 1 to 10") + * }).describe('Joke to tell user.'); + * + * const structuredLlm = llm.withStructuredOutput(Joke, { name: "Joke" }); + * const jokeResult = await structuredLlm.invoke("Tell me a joke about cats"); + * console.log(jokeResult); + * ``` + * + * ```txt + * { + * setup: "Why don\\'t cats play poker?", + * punchline: "Why don\\'t cats play poker? Because they always have an ace up their sleeve!" + * } + * ``` + *
+ * + *
+ * + *
+ * Multimodal + * + * ```typescript + * import { HumanMessage } from '@langchain/core/messages'; + * + * const imageUrl = "https://example.com/image.jpg"; + * const imageData = await fetch(imageUrl).then(res => res.arrayBuffer()); + * const base64Image = Buffer.from(imageData).toString('base64'); + * + * const message = new HumanMessage({ + * content: [ + * { type: "text", text: "describe the weather in this image" }, + * { + * type: "image_url", + * image_url: { url: `data:image/jpeg;base64,${base64Image}` }, + * }, + * ] + * }); + * + * const imageDescriptionAiMsg = await llm.invoke([message]); + * console.log(imageDescriptionAiMsg.content); + * ``` + * + * ```txt + * The weather in the image appears to be clear and sunny. The sky is mostly blue with a few scattered white clouds, indicating fair weather. The bright sunlight is casting shadows on the green, grassy hill, suggesting it is a pleasant day with good visibility. There are no signs of rain or stormy conditions. + * ``` + *
+ * + *
+ * + *
+ * Usage Metadata + * + * ```typescript + * const aiMsgForMetadata = await llm.invoke(input); + * console.log(aiMsgForMetadata.usage_metadata); + * ``` + * + * ```txt + * { input_tokens: 10, output_tokens: 149, total_tokens: 159 } + * ``` + *
+ * + *
+ * + *
+ * Response Metadata + * + * ```typescript + * const aiMsgForResponseMetadata = await llm.invoke(input); + * console.log(aiMsgForResponseMetadata.response_metadata); + * ``` + * + * ```txt + * { + * finishReason: 'STOP', + * index: 0, + * safetyRatings: [ + * { + * category: 'HARM_CATEGORY_SEXUALLY_EXPLICIT', + * probability: 'NEGLIGIBLE' + * }, + * { + * category: 'HARM_CATEGORY_HATE_SPEECH', + * probability: 'NEGLIGIBLE' + * }, + * { category: 'HARM_CATEGORY_HARASSMENT', probability: 'NEGLIGIBLE' }, + * { + * category: 'HARM_CATEGORY_DANGEROUS_CONTENT', + * probability: 'NEGLIGIBLE' + * } + * ] + * } + * ``` + *
+ * + *
+ * + *
+ * Document Messages + * + * This example will show you how to pass documents such as PDFs to Google + * Generative AI through messages. + * + * ```typescript + * const pdfPath = "/Users/my_user/Downloads/invoice.pdf"; + * const pdfBase64 = await fs.readFile(pdfPath, "base64"); + * + * const response = await llm.invoke([ + * ["system", "Use the provided documents to answer the question"], + * [ + * "user", + * [ + * { + * type: "application/pdf", // If the `type` field includes a single slash (`/`), it will be treated as inline data. + * data: pdfBase64, + * }, + * { + * type: "text", + * text: "Summarize the contents of this PDF", + * }, + * ], + * ], + * ]); + * + * console.log(response.content); + * ``` + * + * ```txt + * This is a billing invoice from Twitter Developers for X API Basic Access. The transaction date is January 7, 2025, + * and the amount is $194.34, which has been paid. The subscription period is from January 7, 2025 21:02 to February 7, 2025 00:00 (UTC). + * The tax is $0.00, with a tax rate of 0%. The total amount is $194.34. The payment was made using a Visa card ending in 7022, + * expiring in 12/2026. The billing address is Brace Sproul, 1234 Main Street, San Francisco, CA, US 94103. The company being billed is + * X Corp, located at 865 FM 1209 Building 2, Bastrop, TX, US 78602. Terms and conditions apply. + * ``` + *
+ * + *
+ */ +export class LangchainChatGoogleGenerativeAI extends BaseChatModel implements GoogleGenerativeAIChatInput { - modelName = 'gemini-pro' + static lc_name() { + return 'ChatGoogleGenerativeAI' + } - temperature?: number + lc_serializable = true + + get lc_secrets(): { [key: string]: string } | undefined { + return { + apiKey: 'GOOGLE_API_KEY' + } + } + + lc_namespace = ['langchain', 'chat_models', 'google_genai'] + + get lc_aliases() { + return { + apiKey: 'google_api_key' + } + } + + model: string + + temperature?: number // default value chosen based on model maxOutputTokens?: number - topP?: number + topP?: number // default value chosen based on model - topK?: number + topK?: number // default value chosen based on model stopSequences: string[] = [] @@ -81,37 +594,39 @@ class LangchainChatGoogleGenerativeAI apiKey?: string - baseUrl?: string - streaming = false + json?: boolean + streamUsage = true + convertSystemMessageToHumanContent: boolean | undefined + + thinkingBudget?: number + private client: GenerativeModel - private contextCache?: FlowiseGoogleAICacheManager - get _isMultimodalModel() { - return this.modelName.includes('vision') || this.modelName.startsWith('gemini-1.5') + return this.model.includes('vision') || this.model.startsWith('gemini-1.5') || this.model.startsWith('gemini-2') } - constructor(fields?: GoogleGenerativeAIChatInput) { - super(fields ?? {}) + constructor(fields: GoogleGenerativeAIChatInput) { + super(fields) - this.modelName = fields?.model?.replace(/^models\//, '') ?? fields?.modelName?.replace(/^models\//, '') ?? 'gemini-pro' + this.model = fields.model.replace(/^models\//, '') - this.maxOutputTokens = fields?.maxOutputTokens ?? this.maxOutputTokens + this.maxOutputTokens = fields.maxOutputTokens ?? this.maxOutputTokens if (this.maxOutputTokens && this.maxOutputTokens < 0) { throw new Error('`maxOutputTokens` must be a positive integer') } - this.temperature = fields?.temperature ?? 
this.temperature - if (this.temperature && (this.temperature < 0 || this.temperature > 1)) { - throw new Error('`temperature` must be in the range of [0.0,1.0]') + this.temperature = fields.temperature ?? this.temperature + if (this.temperature && (this.temperature < 0 || this.temperature > 2)) { + throw new Error('`temperature` must be in the range of [0.0,2.0]') } - this.topP = fields?.topP ?? this.topP + this.topP = fields.topP ?? this.topP if (this.topP && this.topP < 0) { throw new Error('`topP` must be a positive integer') } @@ -120,14 +635,14 @@ class LangchainChatGoogleGenerativeAI throw new Error('`topP` must be below 1.') } - this.topK = fields?.topK ?? this.topK + this.topK = fields.topK ?? this.topK if (this.topK && this.topK < 0) { throw new Error('`topK` must be a positive integer') } - this.stopSequences = fields?.stopSequences ?? this.stopSequences + this.stopSequences = fields.stopSequences ?? this.stopSequences - this.apiKey = fields?.apiKey ?? process.env['GOOGLE_API_KEY'] + this.apiKey = fields.apiKey ?? getEnvironmentVariable('GOOGLE_API_KEY') if (!this.apiKey) { throw new Error( 'Please set an API key for Google GenerativeAI ' + @@ -137,7 +652,7 @@ class LangchainChatGoogleGenerativeAI ) } - this.safetySettings = fields?.safetySettings ?? this.safetySettings + this.safetySettings = fields.safetySettings ?? this.safetySettings if (this.safetySettings && this.safetySettings.length > 0) { const safetySettingsSet = new Set(this.safetySettings.map((s) => s.category)) if (safetySettingsSet.size !== this.safetySettings.length) { @@ -145,39 +660,77 @@ class LangchainChatGoogleGenerativeAI } } - this.streaming = fields?.streaming ?? this.streaming + this.streaming = fields.streaming ?? this.streaming + this.json = fields.json + this.thinkingBudget = fields.thinkingBudget - this.streamUsage = fields?.streamUsage ?? 
this.streamUsage - - this.getClient() - } - - async getClient(prompt?: Content[], tools?: Tool[]) { - this.client = new GenerativeAI(this.apiKey ?? '').getGenerativeModel( + this.client = new GenerativeAI(this.apiKey).getGenerativeModel( { - model: this.modelName, - tools, + model: this.model, safetySettings: this.safetySettings as SafetySetting[], generationConfig: { - candidateCount: 1, stopSequences: this.stopSequences, maxOutputTokens: this.maxOutputTokens, temperature: this.temperature, topP: this.topP, - topK: this.topK + topK: this.topK, + ...(this.json ? { responseMimeType: 'application/json' } : {}) } }, { - baseUrl: this.baseUrl + apiVersion: fields.apiVersion, + baseUrl: fields.baseUrl } ) - if (this.contextCache) { - const cachedContent = await this.contextCache.lookup({ - contents: prompt ? [{ ...prompt[0], parts: prompt[0].parts.slice(0, 1) }] : [], - model: this.modelName, - tools - }) - this.client.cachedContent = cachedContent as any + if (this.thinkingBudget !== undefined) { + ;(this.client.generationConfig as any).thinkingConfig = { + ...(this.thinkingBudget !== undefined ? { thinkingBudget: this.thinkingBudget } : {}) + } + } + this.streamUsage = fields.streamUsage ?? this.streamUsage + } + + useCachedContent(cachedContent: CachedContent, modelParams?: ModelParams, requestOptions?: RequestOptions): void { + if (!this.apiKey) return + this.client = new GenerativeAI(this.apiKey).getGenerativeModelFromCachedContent(cachedContent, modelParams, requestOptions) + if (this.thinkingBudget !== undefined) { + ;(this.client.generationConfig as any).thinkingConfig = { + ...(this.thinkingBudget !== undefined ? { thinkingBudget: this.thinkingBudget } : {}) + } + } + } + + get useSystemInstruction(): boolean { + return typeof this.convertSystemMessageToHumanContent === 'boolean' + ? 
!this.convertSystemMessageToHumanContent + : this.computeUseSystemInstruction + } + + get computeUseSystemInstruction(): boolean { + // This works on models from April 2024 and later + // Vertex AI: gemini-1.5-pro and gemini-1.0-002 and later + // AI Studio: gemini-1.5-pro-latest + if (this.model === 'gemini-1.0-pro-001') { + return false + } else if (this.model.startsWith('gemini-pro-vision')) { + return false + } else if (this.model.startsWith('gemini-1.0-pro-vision')) { + return false + } else if (this.model === 'gemini-pro') { + // on AI Studio gemini-pro is still pointing at gemini-1.0-pro-001 + return false + } + return true + } + + getLsParams(options: this['ParsedCallOptions']): LangSmithParams { + return { + ls_provider: 'google_genai', + ls_model_name: this.model, + ls_model_type: 'chat', + ls_temperature: this.client.generationConfig.temperature, + ls_max_tokens: this.client.generationConfig.maxOutputTokens, + ls_stop: options.stop } } @@ -189,86 +742,36 @@ class LangchainChatGoogleGenerativeAI return 'googlegenerativeai' } - override bindTools(tools: (StructuredToolInterface | Record)[], kwargs?: Partial) { - //@ts-ignore - return this.bind({ tools: convertToGeminiTools(tools), ...kwargs }) + override bindTools( + tools: GoogleGenerativeAIToolType[], + kwargs?: Partial + ): Runnable { + return this.withConfig({ + tools: convertToolsToGenAI(tools)?.tools, + ...kwargs + }) } invocationParams(options?: this['ParsedCallOptions']): Omit { - const tools = options?.tools as GoogleGenerativeAIFunctionDeclarationsTool[] | StructuredToolInterface[] | undefined - if (Array.isArray(tools) && !tools.some((t: any) => !('lc_namespace' in t))) { - return { - tools: convertToGeminiTools(options?.tools as StructuredToolInterface[]) as any - } - } - return { - tools: options?.tools as GoogleGenerativeAIFunctionDeclarationsTool[] | undefined - } - } + const toolsAndConfig = options?.tools?.length + ? 
convertToolsToGenAI(options.tools, { + toolChoice: options.tool_choice, + allowedFunctionNames: options.allowedFunctionNames + }) + : undefined - convertFunctionResponse(prompts: Content[]) { - for (let i = 0; i < prompts.length; i += 1) { - if (prompts[i].role === 'function') { - if (prompts[i - 1].role === 'model') { - const toolName = prompts[i - 1].parts[0].functionCall?.name ?? '' - prompts[i].parts = [ - { - functionResponse: { - name: toolName, - response: { - name: toolName, - content: prompts[i].parts[0].text - } - } - } - ] - } - } - } - } - - setContextCache(contextCache: FlowiseGoogleAICacheManager): void { - this.contextCache = contextCache - } - - async getNumTokens(prompt: BaseMessage[]) { - const contents = convertBaseMessagesToContent(prompt, this._isMultimodalModel) - const { totalTokens } = await this.client.countTokens({ contents }) - return totalTokens - } - - async _generateNonStreaming( - prompt: Content[], - options: this['ParsedCallOptions'], - _runManager?: CallbackManagerForLLMRun - ): Promise { - //@ts-ignore - const tools = options.tools ?? [] - - this.convertFunctionResponse(prompt) - - if (tools.length > 0) { - await this.getClient(prompt, tools as Tool[]) + if (options?.responseSchema) { + this.client.generationConfig.responseSchema = options.responseSchema + this.client.generationConfig.responseMimeType = 'application/json' } else { - await this.getClient(prompt) + this.client.generationConfig.responseSchema = undefined + this.client.generationConfig.responseMimeType = this.json ? 'application/json' : undefined + } + + return { + ...(toolsAndConfig?.tools ? { tools: toolsAndConfig.tools } : {}), + ...(toolsAndConfig?.toolConfig ? 
{ toolConfig: toolsAndConfig.toolConfig } : {}) } - const res = await this.caller.callWithOptions({ signal: options?.signal }, async () => { - let output - try { - output = await this.client.generateContent({ - contents: prompt - }) - } catch (e: any) { - if (e.message?.includes('400 Bad Request')) { - e.status = 400 - } - throw e - } - return output - }) - const generationResult = mapGenerateContentResultToChatResult(res.response) - await _runManager?.handleLLMNewToken(generationResult.generations?.length ? generationResult.generations[0].text : '') - return generationResult } async _generate( @@ -276,8 +779,20 @@ class LangchainChatGoogleGenerativeAI options: this['ParsedCallOptions'], runManager?: CallbackManagerForLLMRun ): Promise { - let prompt = convertBaseMessagesToContent(messages, this._isMultimodalModel) - prompt = checkIfEmptyContentAndSameRole(prompt) + const prompt = convertBaseMessagesToContent(messages, this._isMultimodalModel, this.useSystemInstruction) + let actualPrompt = prompt + if (prompt[0].role === 'system') { + const [systemInstruction] = prompt + this.client.systemInstruction = systemInstruction + actualPrompt = prompt.slice(1) + } + + // Ensure actualPrompt is never empty + if (actualPrompt.length === 0) { + actualPrompt = [{ role: 'user', parts: [{ text: '...' 
}] }] + } + + const parameters = this.invocationParams(options) // Handle streaming if (this.streaming) { @@ -299,7 +814,34 @@ class LangchainChatGoogleGenerativeAI return { generations, llmOutput: { estimatedTokenUsage: tokenUsage } } } - return this._generateNonStreaming(prompt, options, runManager) + + const res = await this.completionWithRetry({ + ...parameters, + contents: actualPrompt + }) + + let usageMetadata: UsageMetadata | undefined + if ('usageMetadata' in res.response) { + const genAIUsageMetadata = res.response.usageMetadata as { + promptTokenCount: number | undefined + candidatesTokenCount: number | undefined + totalTokenCount: number | undefined + } + usageMetadata = { + input_tokens: genAIUsageMetadata.promptTokenCount ?? 0, + output_tokens: genAIUsageMetadata.candidatesTokenCount ?? 0, + total_tokens: genAIUsageMetadata.totalTokenCount ?? 0 + } + } + + const generationResult = mapGenerateContentResultToChatResult(res.response, { + usageMetadata + }) + // may not have generations in output if there was a refusal for safety reasons, malformed function call, etc. + if (generationResult.generations?.length > 0) { + await runManager?.handleLLMNewToken(generationResult.generations[0]?.text ?? 
'') + } + return generationResult } async *_streamResponseChunks( @@ -307,46 +849,48 @@ class LangchainChatGoogleGenerativeAI options: this['ParsedCallOptions'], runManager?: CallbackManagerForLLMRun ): AsyncGenerator { - let prompt = convertBaseMessagesToContent(messages, this._isMultimodalModel) - prompt = checkIfEmptyContentAndSameRole(prompt) + const prompt = convertBaseMessagesToContent(messages, this._isMultimodalModel, this.useSystemInstruction) + let actualPrompt = prompt + if (prompt[0].role === 'system') { + const [systemInstruction] = prompt + this.client.systemInstruction = systemInstruction + actualPrompt = prompt.slice(1) + } + + // Ensure actualPrompt is never empty + if (actualPrompt.length === 0) { + actualPrompt = [{ role: 'user', parts: [{ text: '...' }] }] + } const parameters = this.invocationParams(options) const request = { ...parameters, - contents: prompt + contents: actualPrompt } - - const tools = options.tools ?? [] - if (tools.length > 0) { - await this.getClient(prompt, tools as Tool[]) - } else { - await this.getClient(prompt) - } - const stream = await this.caller.callWithOptions({ signal: options?.signal }, async () => { const { stream } = await this.client.generateContentStream(request) return stream }) - let usageMetadata: UsageMetadata | ICommonObject | undefined + let usageMetadata: UsageMetadata | undefined let index = 0 for await (const response of stream) { if ('usageMetadata' in response && this.streamUsage !== false && options.streamUsage !== false) { const genAIUsageMetadata = response.usageMetadata as { - promptTokenCount: number - candidatesTokenCount: number - totalTokenCount: number + promptTokenCount: number | undefined + candidatesTokenCount: number | undefined + totalTokenCount: number | undefined } if (!usageMetadata) { usageMetadata = { - input_tokens: genAIUsageMetadata.promptTokenCount, - output_tokens: genAIUsageMetadata.candidatesTokenCount, - total_tokens: genAIUsageMetadata.totalTokenCount + input_tokens: 
genAIUsageMetadata.promptTokenCount ?? 0, + output_tokens: genAIUsageMetadata.candidatesTokenCount ?? 0, + total_tokens: genAIUsageMetadata.totalTokenCount ?? 0 } } else { // Under the hood, LangChain combines the prompt tokens. Google returns the updated // total each time, so we need to find the difference between the tokens. - const outputTokenDiff = genAIUsageMetadata.candidatesTokenCount - (usageMetadata as ICommonObject).output_tokens + const outputTokenDiff = (genAIUsageMetadata.candidatesTokenCount ?? 0) - usageMetadata.output_tokens usageMetadata = { input_tokens: 0, output_tokens: outputTokenDiff, @@ -356,7 +900,7 @@ class LangchainChatGoogleGenerativeAI } const chunk = convertResponseContentToChatGenerationChunk(response, { - usageMetadata: usageMetadata as UsageMetadata, + usageMetadata, index }) index += 1 @@ -368,6 +912,132 @@ class LangchainChatGoogleGenerativeAI await runManager?.handleLLMNewToken(chunk.text ?? '') } } + + async completionWithRetry( + request: string | GenerateContentRequest | (string | GenerativeAIPart)[], + options?: this['ParsedCallOptions'] + ) { + return this.caller.callWithOptions({ signal: options?.signal }, async () => { + try { + return await this.client.generateContent(request) + } catch (e: any) { + // TODO: Improve error handling + if (e.message?.includes('400 Bad Request')) { + e.status = 400 + } + throw e + } + }) + } + + // eslint-disable-next-line + withStructuredOutput = Record>( + outputSchema: InteropZodType | Record, + config?: StructuredOutputMethodOptions + ): Runnable + + // eslint-disable-next-line + withStructuredOutput = Record>( + outputSchema: InteropZodType | Record, + config?: StructuredOutputMethodOptions + ): Runnable + + // eslint-disable-next-line + withStructuredOutput = Record>( + outputSchema: InteropZodType | Record, + config?: StructuredOutputMethodOptions + ): Runnable | Runnable { + const schema: InteropZodType | Record = outputSchema + const name = config?.name + const method = 
config?.method + const includeRaw = config?.includeRaw + if (method === 'jsonMode') { + throw new Error(`ChatGoogleGenerativeAI only supports "jsonSchema" or "functionCalling" as a method.`) + } + + let llm + let outputParser: BaseLLMOutputParser + if (method === 'functionCalling') { + let functionName = name ?? 'extract' + let tools: GoogleGenerativeAIFunctionDeclarationsTool[] + if (isInteropZodSchema(schema)) { + const jsonSchema = schemaToGenerativeAIParameters(schema) + tools = [ + { + functionDeclarations: [ + { + name: functionName, + description: jsonSchema.description ?? 'A function available to call.', + parameters: jsonSchema as GenerativeAIFunctionDeclarationSchema + } + ] + } + ] + outputParser = new GoogleGenerativeAIToolsOutputParser>({ + returnSingle: true, + keyName: functionName, + zodSchema: schema + }) + } else { + let geminiFunctionDefinition: GenerativeAIFunctionDeclaration + if (typeof schema.name === 'string' && typeof schema.parameters === 'object' && schema.parameters != null) { + geminiFunctionDefinition = schema as GenerativeAIFunctionDeclaration + geminiFunctionDefinition.parameters = removeAdditionalProperties( + schema.parameters + ) as GenerativeAIFunctionDeclarationSchema + functionName = schema.name + } else { + geminiFunctionDefinition = { + name: functionName, + description: schema.description ?? 
'', + parameters: removeAdditionalProperties(schema) as GenerativeAIFunctionDeclarationSchema + } + } + tools = [ + { + functionDeclarations: [geminiFunctionDefinition] + } + ] + outputParser = new GoogleGenerativeAIToolsOutputParser({ + returnSingle: true, + keyName: functionName + }) + } + llm = this.bindTools(tools).withConfig({ + allowedFunctionNames: [functionName] + }) + } else { + const jsonSchema = schemaToGenerativeAIParameters(schema) + llm = this.withConfig({ + responseSchema: jsonSchema as Schema + }) + outputParser = new JsonOutputParser() + } + + if (!includeRaw) { + return llm.pipe(outputParser).withConfig({ + runName: 'ChatGoogleGenerativeAIStructuredOutput' + }) as Runnable + } + + const parserAssign = RunnablePassthrough.assign({ + parsed: (input: any, config) => outputParser.invoke(input.raw, config) + }) + const parserNone = RunnablePassthrough.assign({ + parsed: () => null + }) + const parsedWithFallback = parserAssign.withFallbacks({ + fallbacks: [parserNone] + }) + return RunnableSequence.from([ + { + raw: llm + }, + parsedWithFallback + ]).withConfig({ + runName: 'StructuredOutputRunnable' + }) + } } export class ChatGoogleGenerativeAI extends LangchainChatGoogleGenerativeAI implements IVisionChatModal { @@ -376,15 +1046,15 @@ export class ChatGoogleGenerativeAI extends LangchainChatGoogleGenerativeAI impl multiModalOption: IMultiModalOption id: string - constructor(id: string, fields?: GoogleGenerativeAIChatInput) { + constructor(id: string, fields: GoogleGenerativeAIChatInput) { super(fields) this.id = id - this.configuredModel = fields?.modelName ?? '' + this.configuredModel = fields?.model ?? 
'' this.configuredMaxToken = fields?.maxOutputTokens } revertToOriginalModel(): void { - this.modelName = this.configuredModel + this.model = this.configuredModel this.maxOutputTokens = this.configuredMaxToken } @@ -393,346 +1063,6 @@ export class ChatGoogleGenerativeAI extends LangchainChatGoogleGenerativeAI impl } setVisionModel(): void { - if (this.modelName === 'gemini-1.0-pro-latest') { - this.modelName = DEFAULT_IMAGE_MODEL - this.maxOutputTokens = this.configuredMaxToken ? this.configuredMaxToken : DEFAULT_IMAGE_MAX_TOKEN - } + // pass } } - -function messageContentMedia(content: MessageContentComplex): Part { - if ('mimeType' in content && 'data' in content) { - return { - inlineData: { - mimeType: content.mimeType, - data: content.data - } - } - } - - throw new Error('Invalid media content') -} - -function getMessageAuthor(message: BaseMessage) { - const type = message._getType() - if (ChatMessage.isInstance(message)) { - return message.role - } - return message.name ?? type -} - -function convertAuthorToRole(author: string) { - switch (author.toLowerCase()) { - case 'ai': - case 'assistant': - case 'model': - return 'model' - case 'function': - case 'tool': - return 'function' - case 'system': - case 'human': - default: - return 'user' - } -} - -function convertMessageContentToParts(message: BaseMessage, isMultimodalModel: boolean): Part[] { - if (typeof message.content === 'string' && message.content !== '') { - return [{ text: message.content }] - } - - let functionCalls: FunctionCallPart[] = [] - let functionResponses: FunctionResponsePart[] = [] - let messageParts: Part[] = [] - - if ('tool_calls' in message && Array.isArray(message.tool_calls) && message.tool_calls.length > 0) { - functionCalls = message.tool_calls.map((tc) => ({ - functionCall: { - name: tc.name, - args: tc.args - } - })) - } else if (message._getType() === 'tool' && message.name && message.content) { - functionResponses = [ - { - functionResponse: { - name: message.name, - 
response: message.content - } - } - ] - } else if (Array.isArray(message.content)) { - messageParts = message.content.map((c) => { - if (c.type === 'text') { - return { - text: c.text - } - } - - if (c.type === 'image_url') { - if (!isMultimodalModel) { - throw new Error(`This model does not support images`) - } - let source - if (typeof c.image_url === 'string') { - source = c.image_url - } else if (typeof c.image_url === 'object' && 'url' in c.image_url) { - source = c.image_url.url - } else { - throw new Error('Please provide image as base64 encoded data URL') - } - const [dm, data] = source.split(',') - if (!dm.startsWith('data:')) { - throw new Error('Please provide image as base64 encoded data URL') - } - - const [mimeType, encoding] = dm.replace(/^data:/, '').split(';') - if (encoding !== 'base64') { - throw new Error('Please provide image as base64 encoded data URL') - } - - return { - inlineData: { - data, - mimeType - } - } - } else if (c.type === 'media') { - return messageContentMedia(c) - } else if (c.type === 'tool_use') { - return { - functionCall: { - name: c.name, - args: c.input - } - } - } - throw new Error(`Unknown content type ${(c as { type: string }).type}`) - }) - } - - return [...messageParts, ...functionCalls, ...functionResponses] -} - -/* - * This is a dedicated logic for Multi Agent Supervisor to handle the case where the content is empty, and the role is the same - */ - -function checkIfEmptyContentAndSameRole(contents: Content[]) { - let prevRole = '' - const validContents: Content[] = [] - - for (const content of contents) { - // Skip only if completely empty - if (!content.parts || !content.parts.length) { - continue - } - - // Ensure role is always either 'user' or 'model' - content.role = content.role === 'model' ? 
'model' : 'user' - - // Handle consecutive messages - if (content.role === prevRole && validContents.length > 0) { - // Merge with previous content if same role - validContents[validContents.length - 1].parts.push(...content.parts) - continue - } - - validContents.push(content) - prevRole = content.role - } - - return validContents -} - -function convertBaseMessagesToContent(messages: BaseMessage[], isMultimodalModel: boolean) { - return messages.reduce<{ - content: Content[] - mergeWithPreviousContent: boolean - }>( - (acc, message, index) => { - if (!isBaseMessage(message)) { - throw new Error('Unsupported message input') - } - const author = getMessageAuthor(message) - if (author === 'system' && index !== 0) { - throw new Error('System message should be the first one') - } - const role = convertAuthorToRole(author) - - const prevContent = acc.content[acc.content.length] - if (!acc.mergeWithPreviousContent && prevContent && prevContent.role === role) { - throw new Error('Google Generative AI requires alternate messages between authors') - } - - const parts = convertMessageContentToParts(message, isMultimodalModel) - - if (acc.mergeWithPreviousContent) { - const prevContent = acc.content[acc.content.length - 1] - if (!prevContent) { - throw new Error('There was a problem parsing your system message. Please try a prompt without one.') - } - prevContent.parts.push(...parts) - - return { - mergeWithPreviousContent: false, - content: acc.content - } - } - let actualRole = role - if (actualRole === 'function' || actualRole === 'tool') { - // GenerativeAI API will throw an error if the role is not "user" or "model." 
- actualRole = 'user' - } - const content: Content = { - role: actualRole, - parts - } - return { - mergeWithPreviousContent: author === 'system', - content: [...acc.content, content] - } - }, - { content: [], mergeWithPreviousContent: false } - ).content -} - -function mapGenerateContentResultToChatResult( - response: EnhancedGenerateContentResponse, - extra?: { - usageMetadata: UsageMetadata | undefined - } -): ChatResult { - // if rejected or error, return empty generations with reason in filters - if (!response.candidates || response.candidates.length === 0 || !response.candidates[0]) { - return { - generations: [], - llmOutput: { - filters: response.promptFeedback - } - } - } - - const functionCalls = response.functionCalls() - const [candidate] = response.candidates - const { content, ...generationInfo } = candidate - const text = content?.parts[0]?.text ?? '' - - const generation: ChatGeneration = { - text, - message: new AIMessage({ - content: text, - tool_calls: functionCalls, - additional_kwargs: { - ...generationInfo - }, - usage_metadata: extra?.usageMetadata as any - }), - generationInfo - } - - return { - generations: [generation] - } -} - -function convertResponseContentToChatGenerationChunk( - response: EnhancedGenerateContentResponse, - extra: { - usageMetadata?: UsageMetadata | undefined - index: number - } -): ChatGenerationChunk | null { - if (!response || !response.candidates || response.candidates.length === 0) { - return null - } - const functionCalls = response.functionCalls() - const [candidate] = response.candidates - const { content, ...generationInfo } = candidate - const text = content?.parts?.[0]?.text ?? '' - - const toolCallChunks: ToolCallChunk[] = [] - if (functionCalls) { - toolCallChunks.push( - ...functionCalls.map((fc) => ({ - ...fc, - args: JSON.stringify(fc.args), - index: extra.index - })) - ) - } - return new ChatGenerationChunk({ - text, - message: new AIMessageChunk({ - content: text, - name: !content ? 
undefined : content.role, - tool_call_chunks: toolCallChunks, - // Each chunk can have unique "generationInfo", and merging strategy is unclear, - // so leave blank for now. - additional_kwargs: {}, - usage_metadata: extra.usageMetadata as any - }), - generationInfo - }) -} - -function zodToGeminiParameters(zodObj: any) { - // Gemini doesn't accept either the $schema or additionalProperties - // attributes, so we need to explicitly remove them. - const jsonSchema: any = zodToJsonSchema(zodObj) - // eslint-disable-next-line unused-imports/no-unused-vars - const { $schema, additionalProperties, ...rest } = jsonSchema - - // Ensure all properties have type specified - if (rest.properties) { - Object.keys(rest.properties).forEach((key) => { - const prop = rest.properties[key] - - // Handle enum types - if (prop.enum?.length) { - rest.properties[key] = { - type: 'string', - format: 'enum', - enum: prop.enum - } - } - // Handle missing type - else if (!prop.type && !prop.oneOf && !prop.anyOf && !prop.allOf) { - // Infer type from other properties - if (prop.minimum !== undefined || prop.maximum !== undefined) { - prop.type = 'number' - } else if (prop.format === 'date-time') { - prop.type = 'string' - } else if (prop.items) { - prop.type = 'array' - } else if (prop.properties) { - prop.type = 'object' - } else { - // Default to string if type can't be inferred - prop.type = 'string' - } - } - }) - } - - return rest -} - -function convertToGeminiTools(structuredTools: (StructuredToolInterface | Record)[]) { - return [ - { - functionDeclarations: structuredTools.map((structuredTool) => { - if (isStructuredTool(structuredTool)) { - const jsonSchema = zodToGeminiParameters(structuredTool.schema) - return { - name: structuredTool.name, - description: structuredTool.description, - parameters: jsonSchema - } - } - return structuredTool - }) - } - ] -} diff --git a/packages/components/nodes/chatmodels/ChatGoogleGenerativeAI/utils/common.ts 
b/packages/components/nodes/chatmodels/ChatGoogleGenerativeAI/utils/common.ts new file mode 100644 index 000000000..92c5f0b5a --- /dev/null +++ b/packages/components/nodes/chatmodels/ChatGoogleGenerativeAI/utils/common.ts @@ -0,0 +1,632 @@ +import { + EnhancedGenerateContentResponse, + Content, + Part, + type FunctionDeclarationsTool as GoogleGenerativeAIFunctionDeclarationsTool, + type FunctionDeclaration as GenerativeAIFunctionDeclaration, + POSSIBLE_ROLES, + FunctionCallPart, + TextPart, + FileDataPart, + InlineDataPart +} from '@google/generative-ai' +import { + AIMessage, + AIMessageChunk, + BaseMessage, + ChatMessage, + ToolMessage, + ToolMessageChunk, + MessageContent, + MessageContentComplex, + UsageMetadata, + isAIMessage, + isBaseMessage, + isToolMessage, + StandardContentBlockConverter, + parseBase64DataUrl, + convertToProviderContentBlock, + isDataContentBlock +} from '@langchain/core/messages' +import { ChatGeneration, ChatGenerationChunk, ChatResult } from '@langchain/core/outputs' +import { isLangChainTool } from '@langchain/core/utils/function_calling' +import { isOpenAITool } from '@langchain/core/language_models/base' +import { ToolCallChunk } from '@langchain/core/messages/tool' +import { v4 as uuidv4 } from 'uuid' +import { jsonSchemaToGeminiParameters, schemaToGenerativeAIParameters } from './zod_to_genai_parameters.js' +import { GoogleGenerativeAIToolType } from './types.js' + +export function getMessageAuthor(message: BaseMessage) { + const type = message._getType() + if (ChatMessage.isInstance(message)) { + return message.role + } + if (type === 'tool') { + return type + } + return message.name ?? type +} + +/** + * !!! IMPORTANT: Must return 'user' as default instead of throwing error + * https://github.com/FlowiseAI/Flowise/issues/4743 + * Maps a message type to a Google Generative AI chat author. + * @param message The message to map. + * @param model The model to use for mapping. 
+ * @returns The message type mapped to a Google Generative AI chat author. + */ +export function convertAuthorToRole(author: string): (typeof POSSIBLE_ROLES)[number] { + switch (author) { + /** + * Note: Gemini currently is not supporting system messages + * we will convert them to human messages and merge with following + * */ + case 'supervisor': + case 'ai': + case 'model': // getMessageAuthor returns message.name. code ex.: return message.name ?? type; + return 'model' + case 'system': + return 'system' + case 'human': + return 'user' + case 'tool': + case 'function': + return 'function' + default: + return 'user' // return user as default instead of throwing error + } +} + +function messageContentMedia(content: MessageContentComplex): Part { + if ('mimeType' in content && 'data' in content) { + return { + inlineData: { + mimeType: content.mimeType, + data: content.data + } + } + } + if ('mimeType' in content && 'fileUri' in content) { + return { + fileData: { + mimeType: content.mimeType, + fileUri: content.fileUri + } + } + } + + throw new Error('Invalid media content') +} + +function inferToolNameFromPreviousMessages(message: ToolMessage | ToolMessageChunk, previousMessages: BaseMessage[]): string | undefined { + return previousMessages + .map((msg) => { + if (isAIMessage(msg)) { + return msg.tool_calls ?? 
[] + } + return [] + }) + .flat() + .find((toolCall) => { + return toolCall.id === message.tool_call_id + })?.name +} + +function _getStandardContentBlockConverter(isMultimodalModel: boolean) { + const standardContentBlockConverter: StandardContentBlockConverter<{ + text: TextPart + image: FileDataPart | InlineDataPart + audio: FileDataPart | InlineDataPart + file: FileDataPart | InlineDataPart | TextPart + }> = { + providerName: 'Google Gemini', + + fromStandardTextBlock(block) { + return { + text: block.text + } + }, + + fromStandardImageBlock(block): FileDataPart | InlineDataPart { + if (!isMultimodalModel) { + throw new Error('This model does not support images') + } + if (block.source_type === 'url') { + const data = parseBase64DataUrl({ dataUrl: block.url }) + if (data) { + return { + inlineData: { + mimeType: data.mime_type, + data: data.data + } + } + } else { + return { + fileData: { + mimeType: block.mime_type ?? '', + fileUri: block.url + } + } + } + } + + if (block.source_type === 'base64') { + return { + inlineData: { + mimeType: block.mime_type ?? '', + data: block.data + } + } + } + + throw new Error(`Unsupported source type: ${block.source_type}`) + }, + + fromStandardAudioBlock(block): FileDataPart | InlineDataPart { + if (!isMultimodalModel) { + throw new Error('This model does not support audio') + } + if (block.source_type === 'url') { + const data = parseBase64DataUrl({ dataUrl: block.url }) + if (data) { + return { + inlineData: { + mimeType: data.mime_type, + data: data.data + } + } + } else { + return { + fileData: { + mimeType: block.mime_type ?? '', + fileUri: block.url + } + } + } + } + + if (block.source_type === 'base64') { + return { + inlineData: { + mimeType: block.mime_type ?? 
'', + data: block.data + } + } + } + + throw new Error(`Unsupported source type: ${block.source_type}`) + }, + + fromStandardFileBlock(block): FileDataPart | InlineDataPart | TextPart { + if (!isMultimodalModel) { + throw new Error('This model does not support files') + } + if (block.source_type === 'text') { + return { + text: block.text + } + } + if (block.source_type === 'url') { + const data = parseBase64DataUrl({ dataUrl: block.url }) + if (data) { + return { + inlineData: { + mimeType: data.mime_type, + data: data.data + } + } + } else { + return { + fileData: { + mimeType: block.mime_type ?? '', + fileUri: block.url + } + } + } + } + + if (block.source_type === 'base64') { + return { + inlineData: { + mimeType: block.mime_type ?? '', + data: block.data + } + } + } + throw new Error(`Unsupported source type: ${block.source_type}`) + } + } + return standardContentBlockConverter +} + +function _convertLangChainContentToPart(content: MessageContentComplex, isMultimodalModel: boolean): Part | undefined { + if (isDataContentBlock(content)) { + return convertToProviderContentBlock(content, _getStandardContentBlockConverter(isMultimodalModel)) + } + + if (content.type === 'text') { + return { text: content.text } + } else if (content.type === 'executableCode') { + return { executableCode: content.executableCode } + } else if (content.type === 'codeExecutionResult') { + return { codeExecutionResult: content.codeExecutionResult } + } else if (content.type === 'image_url') { + if (!isMultimodalModel) { + throw new Error(`This model does not support images`) + } + let source + if (typeof content.image_url === 'string') { + source = content.image_url + } else if (typeof content.image_url === 'object' && 'url' in content.image_url) { + source = content.image_url.url + } else { + throw new Error('Please provide image as base64 encoded data URL') + } + const [dm, data] = source.split(',') + if (!dm.startsWith('data:')) { + throw new Error('Please provide image as base64 
encoded data URL') + } + + const [mimeType, encoding] = dm.replace(/^data:/, '').split(';') + if (encoding !== 'base64') { + throw new Error('Please provide image as base64 encoded data URL') + } + + return { + inlineData: { + data, + mimeType + } + } + } else if (content.type === 'media') { + return messageContentMedia(content) + } else if (content.type === 'tool_use') { + return { + functionCall: { + name: content.name, + args: content.input + } + } + } else if ( + content.type?.includes('/') && + // Ensure it's a single slash. + content.type.split('/').length === 2 && + 'data' in content && + typeof content.data === 'string' + ) { + return { + inlineData: { + mimeType: content.type, + data: content.data + } + } + } else if ('functionCall' in content) { + // No action needed here โ€” function calls will be added later from message.tool_calls + return undefined + } else { + if ('type' in content) { + throw new Error(`Unknown content type ${content.type}`) + } else { + throw new Error(`Unknown content ${JSON.stringify(content)}`) + } + } +} + +export function convertMessageContentToParts(message: BaseMessage, isMultimodalModel: boolean, previousMessages: BaseMessage[]): Part[] { + if (isToolMessage(message)) { + const messageName = message.name ?? inferToolNameFromPreviousMessages(message, previousMessages) + if (messageName === undefined) { + throw new Error( + `Google requires a tool name for each tool call response, and we could not infer a called tool name for ToolMessage "${message.id}" from your passed messages. Please populate a "name" field on that ToolMessage explicitly.` + ) + } + + const result = Array.isArray(message.content) + ? (message.content.map((c) => _convertLangChainContentToPart(c, isMultimodalModel)).filter((p) => p !== undefined) as Part[]) + : message.content + + if (message.status === 'error') { + return [ + { + functionResponse: { + name: messageName, + // The API expects an object with an `error` field if the function call fails. 
+ // `error` must be a valid object (not a string or array), so we wrap `message.content` here + response: { error: { details: result } } + } + } + ] + } + + return [ + { + functionResponse: { + name: messageName, + // again, can't have a string or array value for `response`, so we wrap it as an object here + response: { result } + } + } + ] + } + + let functionCalls: FunctionCallPart[] = [] + const messageParts: Part[] = [] + + if (typeof message.content === 'string' && message.content) { + messageParts.push({ text: message.content }) + } + + if (Array.isArray(message.content)) { + messageParts.push( + ...(message.content.map((c) => _convertLangChainContentToPart(c, isMultimodalModel)).filter((p) => p !== undefined) as Part[]) + ) + } + + if (isAIMessage(message) && message.tool_calls?.length) { + functionCalls = message.tool_calls.map((tc) => { + return { + functionCall: { + name: tc.name, + args: tc.args + } + } + }) + } + + return [...messageParts, ...functionCalls] +} + +export function convertBaseMessagesToContent( + messages: BaseMessage[], + isMultimodalModel: boolean, + convertSystemMessageToHumanContent: boolean = false +) { + return messages.reduce<{ + content: Content[] + mergeWithPreviousContent: boolean + }>( + (acc, message, index) => { + if (!isBaseMessage(message)) { + throw new Error('Unsupported message input') + } + const author = getMessageAuthor(message) + if (author === 'system' && index !== 0) { + throw new Error('System message should be the first one') + } + const role = convertAuthorToRole(author) + + const prevContent = acc.content[acc.content.length] + if (!acc.mergeWithPreviousContent && prevContent && prevContent.role === role) { + throw new Error('Google Generative AI requires alternate messages between authors') + } + + const parts = convertMessageContentToParts(message, isMultimodalModel, messages.slice(0, index)) + + if (acc.mergeWithPreviousContent) { + const prevContent = acc.content[acc.content.length - 1] + if (!prevContent) { 
+ throw new Error('There was a problem parsing your system message. Please try a prompt without one.') + } + prevContent.parts.push(...parts) + + return { + mergeWithPreviousContent: false, + content: acc.content + } + } + let actualRole = role + if (actualRole === 'function' || (actualRole === 'system' && !convertSystemMessageToHumanContent)) { + // GenerativeAI API will throw an error if the role is not "user" or "model." + actualRole = 'user' + } + const content: Content = { + role: actualRole, + parts + } + return { + mergeWithPreviousContent: author === 'system' && !convertSystemMessageToHumanContent, + content: [...acc.content, content] + } + }, + { content: [], mergeWithPreviousContent: false } + ).content +} + +export function mapGenerateContentResultToChatResult( + response: EnhancedGenerateContentResponse, + extra?: { + usageMetadata: UsageMetadata | undefined + } +): ChatResult { + // if rejected or error, return empty generations with reason in filters + if (!response.candidates || response.candidates.length === 0 || !response.candidates[0]) { + return { + generations: [], + llmOutput: { + filters: response.promptFeedback + } + } + } + + const functionCalls = response.functionCalls() + const [candidate] = response.candidates + const { content: candidateContent, ...generationInfo } = candidate + let content: MessageContent | undefined + + if (Array.isArray(candidateContent?.parts) && candidateContent.parts.length === 1 && candidateContent.parts[0].text) { + content = candidateContent.parts[0].text + } else if (Array.isArray(candidateContent?.parts) && candidateContent.parts.length > 0) { + content = candidateContent.parts.map((p) => { + if ('text' in p) { + return { + type: 'text', + text: p.text + } + } else if ('executableCode' in p) { + return { + type: 'executableCode', + executableCode: p.executableCode + } + } else if ('codeExecutionResult' in p) { + return { + type: 'codeExecutionResult', + codeExecutionResult: p.codeExecutionResult + } + } + 
return p + }) + } else { + // no content returned - likely due to abnormal stop reason, e.g. malformed function call + content = [] + } + + let text = '' + if (typeof content === 'string') { + text = content + } else if (Array.isArray(content) && content.length > 0) { + const block = content.find((b) => 'text' in b) as { text: string } | undefined + text = block?.text ?? text + } + + const generation: ChatGeneration = { + text, + message: new AIMessage({ + content: content ?? '', + tool_calls: functionCalls?.map((fc) => { + return { + ...fc, + type: 'tool_call', + id: 'id' in fc && typeof fc.id === 'string' ? fc.id : uuidv4() + } + }), + additional_kwargs: { + ...generationInfo + }, + usage_metadata: extra?.usageMetadata + }), + generationInfo + } + + return { + generations: [generation], + llmOutput: { + tokenUsage: { + promptTokens: extra?.usageMetadata?.input_tokens, + completionTokens: extra?.usageMetadata?.output_tokens, + totalTokens: extra?.usageMetadata?.total_tokens + } + } + } +} + +export function convertResponseContentToChatGenerationChunk( + response: EnhancedGenerateContentResponse, + extra: { + usageMetadata?: UsageMetadata | undefined + index: number + } +): ChatGenerationChunk | null { + if (!response.candidates || response.candidates.length === 0) { + return null + } + const functionCalls = response.functionCalls() + const [candidate] = response.candidates + const { content: candidateContent, ...generationInfo } = candidate + let content: MessageContent | undefined + // Checks if some parts do not have text. If false, it means that the content is a string. 
+ if (Array.isArray(candidateContent?.parts) && candidateContent.parts.every((p) => 'text' in p)) { + content = candidateContent.parts.map((p) => p.text).join('') + } else if (Array.isArray(candidateContent?.parts)) { + content = candidateContent.parts.map((p) => { + if ('text' in p) { + return { + type: 'text', + text: p.text + } + } else if ('executableCode' in p) { + return { + type: 'executableCode', + executableCode: p.executableCode + } + } else if ('codeExecutionResult' in p) { + return { + type: 'codeExecutionResult', + codeExecutionResult: p.codeExecutionResult + } + } + return p + }) + } else { + // no content returned - likely due to abnormal stop reason, e.g. malformed function call + content = [] + } + + let text = '' + if (content && typeof content === 'string') { + text = content + } else if (Array.isArray(content)) { + const block = content.find((b) => 'text' in b) as { text: string } | undefined + text = block?.text ?? '' + } + + const toolCallChunks: ToolCallChunk[] = [] + if (functionCalls) { + toolCallChunks.push( + ...functionCalls.map((fc) => ({ + ...fc, + args: JSON.stringify(fc.args), + index: extra.index, + type: 'tool_call_chunk' as const, + id: 'id' in fc && typeof fc.id === 'string' ? fc.id : uuidv4() + })) + ) + } + + return new ChatGenerationChunk({ + text, + message: new AIMessageChunk({ + content: content || '', + name: !candidateContent ? undefined : candidateContent.role, + tool_call_chunks: toolCallChunks, + // Each chunk can have unique "generationInfo", and merging strategy is unclear, + // so leave blank for now. 
[] + } + return [] + }) + .flat() + .find((toolCall) => { + return toolCall.id === message.tool_call_id + })?.name +} + +function _getStandardContentBlockConverter(isMultimodalModel: boolean) { + const standardContentBlockConverter: StandardContentBlockConverter<{ + text: TextPart + image: FileDataPart | InlineDataPart + audio: FileDataPart | InlineDataPart + file: FileDataPart | InlineDataPart | TextPart + }> = { + providerName: 'Google Gemini', + + fromStandardTextBlock(block) { + return { + text: block.text + } + }, + + fromStandardImageBlock(block): FileDataPart | InlineDataPart { + if (!isMultimodalModel) { + throw new Error('This model does not support images') + } + if (block.source_type === 'url') { + const data = parseBase64DataUrl({ dataUrl: block.url }) + if (data) { + return { + inlineData: { + mimeType: data.mime_type, + data: data.data + } + } + } else { + return { + fileData: { + mimeType: block.mime_type ?? '', + fileUri: block.url + } + } + } + } + + if (block.source_type === 'base64') { + return { + inlineData: { + mimeType: block.mime_type ?? '', + data: block.data + } + } + } + + throw new Error(`Unsupported source type: ${block.source_type}`) + }, + + fromStandardAudioBlock(block): FileDataPart | InlineDataPart { + if (!isMultimodalModel) { + throw new Error('This model does not support audio') + } + if (block.source_type === 'url') { + const data = parseBase64DataUrl({ dataUrl: block.url }) + if (data) { + return { + inlineData: { + mimeType: data.mime_type, + data: data.data + } + } + } else { + return { + fileData: { + mimeType: block.mime_type ?? '', + fileUri: block.url + } + } + } + } + + if (block.source_type === 'base64') { + return { + inlineData: { + mimeType: block.mime_type ?? 
JsonOutputKeyToolsParserParamsInterop {} + +export class GoogleGenerativeAIToolsOutputParser = Record> extends BaseLLMOutputParser { + static lc_name() { + return 'GoogleGenerativeAIToolsOutputParser' + } + + lc_namespace = ['langchain', 'google_genai', 'output_parsers'] + + returnId = false + + /** The type of tool calls to return. */ + keyName: string + + /** Whether to return only the first tool call. */ + returnSingle = false + + zodSchema?: InteropZodType + + constructor(params: GoogleGenerativeAIToolsOutputParserParams) { + super(params) + this.keyName = params.keyName + this.returnSingle = params.returnSingle ?? this.returnSingle + this.zodSchema = params.zodSchema + } + + protected async _validateResult(result: unknown): Promise { + if (this.zodSchema === undefined) { + return result as T + } + const zodParsedResult = await interopSafeParseAsync(this.zodSchema, result) + if (zodParsedResult.success) { + return zodParsedResult.data + } else { + throw new OutputParserException( + `Failed to parse. Text: "${JSON.stringify(result, null, 2)}". 
Error: ${JSON.stringify(zodParsedResult.error.issues)}`, + JSON.stringify(result, null, 2) + ) + } + } + + async parseResult(generations: ChatGeneration[]): Promise { + const tools = generations.flatMap((generation) => { + const { message } = generation + if (!('tool_calls' in message) || !Array.isArray(message.tool_calls)) { + return [] + } + return message.tool_calls as ToolCall[] + }) + if (tools[0] === undefined) { + throw new Error('No parseable tool calls provided to GoogleGenerativeAIToolsOutputParser.') + } + const [tool] = tools + const validatedResult = await this._validateResult(tool.args) + return validatedResult + } +} diff --git a/packages/components/nodes/chatmodels/ChatGoogleGenerativeAI/utils/tools.ts b/packages/components/nodes/chatmodels/ChatGoogleGenerativeAI/utils/tools.ts new file mode 100644 index 000000000..a356252e1 --- /dev/null +++ b/packages/components/nodes/chatmodels/ChatGoogleGenerativeAI/utils/tools.ts @@ -0,0 +1,136 @@ +import { + Tool as GenerativeAITool, + ToolConfig, + FunctionCallingMode, + FunctionDeclaration, + FunctionDeclarationsTool, + FunctionDeclarationSchema +} from '@google/generative-ai' +import { ToolChoice } from '@langchain/core/language_models/chat_models' +import { StructuredToolInterface } from '@langchain/core/tools' +import { isLangChainTool } from '@langchain/core/utils/function_calling' +import { isOpenAITool, ToolDefinition } from '@langchain/core/language_models/base' +import { convertToGenerativeAITools } from './common.js' +import { GoogleGenerativeAIToolType } from './types.js' +import { removeAdditionalProperties } from './zod_to_genai_parameters.js' + +export function convertToolsToGenAI( + tools: GoogleGenerativeAIToolType[], + extra?: { + toolChoice?: ToolChoice + allowedFunctionNames?: string[] + } +): { + tools: GenerativeAITool[] + toolConfig?: ToolConfig +} { + // Extract function declaration processing to a separate function + const genAITools = processTools(tools) + + // Simplify tool config 
creation + const toolConfig = createToolConfig(genAITools, extra) + + return { tools: genAITools, toolConfig } +} + +function processTools(tools: GoogleGenerativeAIToolType[]): GenerativeAITool[] { + let functionDeclarationTools: FunctionDeclaration[] = [] + const genAITools: GenerativeAITool[] = [] + + tools.forEach((tool) => { + if (isLangChainTool(tool)) { + const [convertedTool] = convertToGenerativeAITools([tool as StructuredToolInterface]) + if (convertedTool.functionDeclarations) { + functionDeclarationTools.push(...convertedTool.functionDeclarations) + } + } else if (isOpenAITool(tool)) { + const { functionDeclarations } = convertOpenAIToolToGenAI(tool) + if (functionDeclarations) { + functionDeclarationTools.push(...functionDeclarations) + } else { + throw new Error('Failed to convert OpenAI structured tool to GenerativeAI tool') + } + } else { + genAITools.push(tool as GenerativeAITool) + } + }) + + const genAIFunctionDeclaration = genAITools.find((t) => 'functionDeclarations' in t) + if (genAIFunctionDeclaration) { + return genAITools.map((tool) => { + if (functionDeclarationTools?.length > 0 && 'functionDeclarations' in tool) { + const newTool = { + functionDeclarations: [...(tool.functionDeclarations || []), ...functionDeclarationTools] + } + // Clear the functionDeclarationTools array so it is not passed again + functionDeclarationTools = [] + return newTool + } + return tool + }) + } + + return [ + ...genAITools, + ...(functionDeclarationTools.length > 0 + ? 
[ + { + functionDeclarations: functionDeclarationTools + } + ] + : []) + ] +} + +function convertOpenAIToolToGenAI(tool: ToolDefinition): FunctionDeclarationsTool { + return { + functionDeclarations: [ + { + name: tool.function.name, + description: tool.function.description, + parameters: removeAdditionalProperties(tool.function.parameters) as FunctionDeclarationSchema + } + ] + } +} + +function createToolConfig( + genAITools: GenerativeAITool[], + extra?: { + toolChoice?: ToolChoice + allowedFunctionNames?: string[] + } +): ToolConfig | undefined { + if (!genAITools.length || !extra) return undefined + + const { toolChoice, allowedFunctionNames } = extra + + const modeMap: Record = { + any: FunctionCallingMode.ANY, + auto: FunctionCallingMode.AUTO, + none: FunctionCallingMode.NONE + } + + if (toolChoice && ['any', 'auto', 'none'].includes(toolChoice as string)) { + return { + functionCallingConfig: { + mode: modeMap[toolChoice as keyof typeof modeMap] ?? 'MODE_UNSPECIFIED', + allowedFunctionNames + } + } + } + + if (typeof toolChoice === 'string' || allowedFunctionNames) { + return { + functionCallingConfig: { + mode: FunctionCallingMode.ANY, + allowedFunctionNames: [ + ...(allowedFunctionNames ?? []), + ...(toolChoice && typeof toolChoice === 'string' ? 
[toolChoice] : []) + ] + } + } + } + + return undefined +} diff --git a/packages/components/nodes/chatmodels/ChatGoogleGenerativeAI/utils/types.ts b/packages/components/nodes/chatmodels/ChatGoogleGenerativeAI/utils/types.ts new file mode 100644 index 000000000..f784f635f --- /dev/null +++ b/packages/components/nodes/chatmodels/ChatGoogleGenerativeAI/utils/types.ts @@ -0,0 +1,12 @@ +import { + CodeExecutionTool, + FunctionDeclarationsTool as GoogleGenerativeAIFunctionDeclarationsTool, + GoogleSearchRetrievalTool +} from '@google/generative-ai' +import { BindToolsInput } from '@langchain/core/language_models/chat_models' + +export type GoogleGenerativeAIToolType = + | BindToolsInput + | GoogleGenerativeAIFunctionDeclarationsTool + | CodeExecutionTool + | GoogleSearchRetrievalTool diff --git a/packages/components/nodes/chatmodels/ChatGoogleGenerativeAI/utils/zod_to_genai_parameters.ts b/packages/components/nodes/chatmodels/ChatGoogleGenerativeAI/utils/zod_to_genai_parameters.ts new file mode 100644 index 000000000..020c6359f --- /dev/null +++ b/packages/components/nodes/chatmodels/ChatGoogleGenerativeAI/utils/zod_to_genai_parameters.ts @@ -0,0 +1,67 @@ +import { + type FunctionDeclarationSchema as GenerativeAIFunctionDeclarationSchema, + type SchemaType as FunctionDeclarationSchemaType +} from '@google/generative-ai' +import { InteropZodType, isInteropZodSchema } from '@langchain/core/utils/types' +import { type JsonSchema7Type, toJsonSchema } from '@langchain/core/utils/json_schema' + +export interface GenerativeAIJsonSchema extends Record { + properties?: Record + type: FunctionDeclarationSchemaType +} + +export interface GenerativeAIJsonSchemaDirty extends GenerativeAIJsonSchema { + properties?: Record + additionalProperties?: boolean +} + +export function removeAdditionalProperties(obj: Record): GenerativeAIJsonSchema { + if (typeof obj === 'object' && obj !== null) { + const newObj = { ...obj } + + if ('additionalProperties' in newObj) { + delete 
newObj.additionalProperties + } + if ('$schema' in newObj) { + delete newObj.$schema + } + if ('strict' in newObj) { + delete newObj.strict + } + + for (const key in newObj) { + if (key in newObj) { + if (Array.isArray(newObj[key])) { + newObj[key] = newObj[key].map(removeAdditionalProperties) + } else if (typeof newObj[key] === 'object' && newObj[key] !== null) { + newObj[key] = removeAdditionalProperties(newObj[key]) + } + } + } + + return newObj as GenerativeAIJsonSchema + } + + return obj as GenerativeAIJsonSchema +} + +export function schemaToGenerativeAIParameters = Record>( + schema: InteropZodType | JsonSchema7Type +): GenerativeAIFunctionDeclarationSchema { + // GenerativeAI doesn't accept either the $schema or additionalProperties + // attributes, so we need to explicitly remove them. + const jsonSchema = removeAdditionalProperties(isInteropZodSchema(schema) ? toJsonSchema(schema) : schema) + const { _schema, ...rest } = jsonSchema + + return rest as GenerativeAIFunctionDeclarationSchema +} + +export function jsonSchemaToGeminiParameters(schema: Record): GenerativeAIFunctionDeclarationSchema { + // Gemini doesn't accept either the $schema or additionalProperties + // attributes, so we need to explicitly remove them. 
+ + const jsonSchema = removeAdditionalProperties(schema as GenerativeAIJsonSchemaDirty) + const { _schema, ...rest } = jsonSchema + + return rest as GenerativeAIFunctionDeclarationSchema +} diff --git a/packages/components/nodes/chatmodels/ChatGoogleVertexAI/ChatGoogleVertexAI.ts b/packages/components/nodes/chatmodels/ChatGoogleVertexAI/ChatGoogleVertexAI.ts index 44fed0b6a..00641da1d 100644 --- a/packages/components/nodes/chatmodels/ChatGoogleVertexAI/ChatGoogleVertexAI.ts +++ b/packages/components/nodes/chatmodels/ChatGoogleVertexAI/ChatGoogleVertexAI.ts @@ -1,5 +1,6 @@ import { BaseCache } from '@langchain/core/caches' -import { ChatVertexAI as LcChatVertexAI, ChatVertexAIInput } from '@langchain/google-vertexai' +import { ChatVertexAIInput, ChatVertexAI as LcChatVertexAI } from '@langchain/google-vertexai' +import { buildGoogleCredentials } from '../../../src/google-utils' import { ICommonObject, IMultiModalOption, @@ -9,8 +10,8 @@ import { INodeParams, IVisionChatModal } from '../../../src/Interface' -import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils' -import { getModels, MODEL_TYPE } from '../../../src/modelLoader' +import { getModels, getRegions, MODEL_TYPE } from '../../../src/modelLoader' +import { getBaseClasses } from '../../../src/utils' const DEFAULT_IMAGE_MAX_TOKEN = 8192 const DEFAULT_IMAGE_MODEL = 'gemini-1.5-flash-latest' @@ -65,7 +66,7 @@ class GoogleVertexAI_ChatModels implements INode { constructor() { this.label = 'ChatGoogleVertexAI' this.name = 'chatGoogleVertexAI' - this.version = 5.1 + this.version = 5.3 this.type = 'ChatGoogleVertexAI' this.icon = 'GoogleVertex.svg' this.category = 'Chat Models' @@ -87,6 +88,14 @@ class GoogleVertexAI_ChatModels implements INode { type: 'BaseCache', optional: true }, + { + label: 'Region', + description: 'Region to use for the model.', + name: 'region', + type: 'asyncOptions', + loadMethod: 'listRegions', + optional: true + }, { label: 'Model Name', name: 
'modelName', @@ -151,6 +160,16 @@ class GoogleVertexAI_ChatModels implements INode { step: 1, optional: true, additionalParams: true + }, + { + label: 'Thinking Budget', + name: 'thinkingBudget', + type: 'number', + description: 'Number of tokens to use for thinking process (0 to disable)', + step: 1, + placeholder: '1024', + optional: true, + additionalParams: true } ] } @@ -159,31 +178,13 @@ class GoogleVertexAI_ChatModels implements INode { loadMethods = { async listModels(): Promise { return await getModels(MODEL_TYPE.CHAT, 'chatGoogleVertexAI') + }, + async listRegions(): Promise { + return await getRegions(MODEL_TYPE.CHAT, 'chatGoogleVertexAI') } } async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { - const credentialData = await getCredentialData(nodeData.credential ?? '', options) - const googleApplicationCredentialFilePath = getCredentialParam('googleApplicationCredentialFilePath', credentialData, nodeData) - const googleApplicationCredential = getCredentialParam('googleApplicationCredential', credentialData, nodeData) - const projectID = getCredentialParam('projectID', credentialData, nodeData) - - const authOptions: ICommonObject = {} - if (Object.keys(credentialData).length !== 0) { - if (!googleApplicationCredentialFilePath && !googleApplicationCredential) - throw new Error('Please specify your Google Application Credential') - if (!googleApplicationCredentialFilePath && !googleApplicationCredential) - throw new Error( - 'Error: More than one component has been inputted. 
Please use only one of the following: Google Application Credential File Path or Google Credential JSON Object' - ) - if (googleApplicationCredentialFilePath && !googleApplicationCredential) - authOptions.keyFile = googleApplicationCredentialFilePath - else if (!googleApplicationCredentialFilePath && googleApplicationCredential) - authOptions.credentials = JSON.parse(googleApplicationCredential) - - if (projectID) authOptions.projectId = projectID - } - const temperature = nodeData.inputs?.temperature as string const modelName = nodeData.inputs?.modelName as string const customModelName = nodeData.inputs?.customModelName as string @@ -192,6 +193,8 @@ class GoogleVertexAI_ChatModels implements INode { const cache = nodeData.inputs?.cache as BaseCache const topK = nodeData.inputs?.topK as string const streaming = nodeData.inputs?.streaming as boolean + const thinkingBudget = nodeData.inputs?.thinkingBudget as string + const region = nodeData.inputs?.region as string const allowImageUploads = nodeData.inputs?.allowImageUploads as boolean @@ -206,11 +209,16 @@ class GoogleVertexAI_ChatModels implements INode { modelName: customModelName || modelName, streaming: streaming ?? 
true } - if (Object.keys(authOptions).length !== 0) obj.authOptions = authOptions + + const authOptions = await buildGoogleCredentials(nodeData, options) + if (authOptions && Object.keys(authOptions).length !== 0) obj.authOptions = authOptions + if (maxOutputTokens) obj.maxOutputTokens = parseInt(maxOutputTokens, 10) if (topP) obj.topP = parseFloat(topP) if (cache) obj.cache = cache if (topK) obj.topK = parseFloat(topK) + if (thinkingBudget) obj.thinkingBudget = parseInt(thinkingBudget, 10) + if (region) obj.location = region const model = new ChatVertexAI(nodeData.id, obj) model.setMultiModalOption(multiModalOption) diff --git a/packages/components/nodes/chatmodels/ChatHuggingFace/ChatHuggingFace.ts b/packages/components/nodes/chatmodels/ChatHuggingFace/ChatHuggingFace.ts index 29d1b74e5..4cda05716 100644 --- a/packages/components/nodes/chatmodels/ChatHuggingFace/ChatHuggingFace.ts +++ b/packages/components/nodes/chatmodels/ChatHuggingFace/ChatHuggingFace.ts @@ -41,15 +41,17 @@ class ChatHuggingFace_ChatModels implements INode { label: 'Model', name: 'model', type: 'string', - description: 'If using own inference endpoint, leave this blank', - placeholder: 'gpt2' + description: + 'Model name (e.g., deepseek-ai/DeepSeek-V3.2-Exp:novita). If model includes provider (:) or using router endpoint, leave Endpoint blank.', + placeholder: 'deepseek-ai/DeepSeek-V3.2-Exp:novita' }, { label: 'Endpoint', name: 'endpoint', type: 'string', placeholder: 'https://xyz.eu-west-1.aws.endpoints.huggingface.cloud/gpt2', - description: 'Using your own inference endpoint', + description: + 'Custom inference endpoint (optional). Not needed for models with providers (:) or router endpoints. Leave blank to use Inference Providers.', optional: true }, { @@ -103,7 +105,7 @@ class ChatHuggingFace_ChatModels implements INode { type: 'string', rows: 4, placeholder: 'AI assistant:', - description: 'Sets the stop sequences to use. 
Use comma to seperate different sequences.', + description: 'Sets the stop sequences to use. Use comma to separate different sequences.', optional: true, additionalParams: true } @@ -124,6 +126,15 @@ class ChatHuggingFace_ChatModels implements INode { const credentialData = await getCredentialData(nodeData.credential ?? '', options) const huggingFaceApiKey = getCredentialParam('huggingFaceApiKey', credentialData, nodeData) + if (!huggingFaceApiKey) { + console.error('[ChatHuggingFace] API key validation failed: No API key found') + throw new Error('HuggingFace API key is required. Please configure it in the credential settings.') + } + + if (!huggingFaceApiKey.startsWith('hf_')) { + console.warn('[ChatHuggingFace] API key format warning: Key does not start with "hf_"') + } + const obj: Partial = { model, apiKey: huggingFaceApiKey diff --git a/packages/components/nodes/chatmodels/ChatHuggingFace/core.ts b/packages/components/nodes/chatmodels/ChatHuggingFace/core.ts index 2cf2de25d..522734cda 100644 --- a/packages/components/nodes/chatmodels/ChatHuggingFace/core.ts +++ b/packages/components/nodes/chatmodels/ChatHuggingFace/core.ts @@ -56,9 +56,9 @@ export class HuggingFaceInference extends LLM implements HFInput { this.apiKey = fields?.apiKey ?? getEnvironmentVariable('HUGGINGFACEHUB_API_KEY') this.endpointUrl = fields?.endpointUrl this.includeCredentials = fields?.includeCredentials - if (!this.apiKey) { + if (!this.apiKey || this.apiKey.trim() === '') { throw new Error( - 'Please set an API key for HuggingFace Hub in the environment variable HUGGINGFACEHUB_API_KEY or in the apiKey field of the HuggingFaceInference constructor.' + 'Please set an API key for HuggingFace Hub. Either configure it in the credential settings in the UI, or set the environment variable HUGGINGFACEHUB_API_KEY.' 
) } } @@ -68,19 +68,21 @@ export class HuggingFaceInference extends LLM implements HFInput { } invocationParams(options?: this['ParsedCallOptions']) { - return { - model: this.model, - parameters: { - // make it behave similar to openai, returning only the generated text - return_full_text: false, - temperature: this.temperature, - max_new_tokens: this.maxTokens, - stop: options?.stop ?? this.stopSequences, - top_p: this.topP, - top_k: this.topK, - repetition_penalty: this.frequencyPenalty - } + // Return parameters compatible with chatCompletion API (OpenAI-compatible format) + const params: any = { + temperature: this.temperature, + max_tokens: this.maxTokens, + stop: options?.stop ?? this.stopSequences, + top_p: this.topP } + // Include optional parameters if they are defined + if (this.topK !== undefined) { + params.top_k = this.topK + } + if (this.frequencyPenalty !== undefined) { + params.frequency_penalty = this.frequencyPenalty + } + return params } async *_streamResponseChunks( @@ -88,51 +90,109 @@ export class HuggingFaceInference extends LLM implements HFInput { options: this['ParsedCallOptions'], runManager?: CallbackManagerForLLMRun ): AsyncGenerator { - const hfi = await this._prepareHFInference() - const stream = await this.caller.call(async () => - hfi.textGenerationStream({ - ...this.invocationParams(options), - inputs: prompt - }) - ) - for await (const chunk of stream) { - const token = chunk.token.text - yield new GenerationChunk({ text: token, generationInfo: chunk }) - await runManager?.handleLLMNewToken(token ?? 
'') - - // stream is done - if (chunk.generated_text) - yield new GenerationChunk({ - text: '', - generationInfo: { finished: true } + try { + const client = await this._prepareHFInference() + const stream = await this.caller.call(async () => + client.chatCompletionStream({ + model: this.model, + messages: [{ role: 'user', content: prompt }], + ...this.invocationParams(options) }) + ) + for await (const chunk of stream) { + const token = chunk.choices[0]?.delta?.content || '' + if (token) { + yield new GenerationChunk({ text: token, generationInfo: chunk }) + await runManager?.handleLLMNewToken(token) + } + // stream is done when finish_reason is set + if (chunk.choices[0]?.finish_reason) { + yield new GenerationChunk({ + text: '', + generationInfo: { finished: true } + }) + break + } + } + } catch (error: any) { + console.error('[ChatHuggingFace] Error in _streamResponseChunks:', error) + // Provide more helpful error messages + if (error?.message?.includes('endpointUrl') || error?.message?.includes('third-party provider')) { + throw new Error( + `Cannot use custom endpoint with model "${this.model}" that includes a provider. Please leave the Endpoint field blank in the UI. 
Original error: ${error.message}` + ) + } + throw error } } /** @ignore */ async _call(prompt: string, options: this['ParsedCallOptions']): Promise { - const hfi = await this._prepareHFInference() - const args = { ...this.invocationParams(options), inputs: prompt } - const res = await this.caller.callWithOptions({ signal: options.signal }, hfi.textGeneration.bind(hfi), args) - return res.generated_text + try { + const client = await this._prepareHFInference() + // Use chatCompletion for chat models (v4 supports conversational models via Inference Providers) + const args = { + model: this.model, + messages: [{ role: 'user', content: prompt }], + ...this.invocationParams(options) + } + const res = await this.caller.callWithOptions({ signal: options.signal }, client.chatCompletion.bind(client), args) + const content = res.choices[0]?.message?.content || '' + if (!content) { + console.error('[ChatHuggingFace] No content in response:', JSON.stringify(res)) + throw new Error(`No content received from HuggingFace API. Response: ${JSON.stringify(res)}`) + } + return content + } catch (error: any) { + console.error('[ChatHuggingFace] Error in _call:', error.message) + // Provide more helpful error messages + if (error?.message?.includes('endpointUrl') || error?.message?.includes('third-party provider')) { + throw new Error( + `Cannot use custom endpoint with model "${this.model}" that includes a provider. Please leave the Endpoint field blank in the UI. Original error: ${error.message}` + ) + } + if (error?.message?.includes('Invalid username or password') || error?.message?.includes('authentication')) { + throw new Error( + `HuggingFace API authentication failed. Please verify your API key is correct and starts with "hf_". 
Original error: ${error.message}` + ) + } + throw error + } } /** @ignore */ private async _prepareHFInference() { - const { HfInference } = await HuggingFaceInference.imports() - const hfi = new HfInference(this.apiKey, { - includeCredentials: this.includeCredentials - }) - return this.endpointUrl ? hfi.endpoint(this.endpointUrl) : hfi + if (!this.apiKey || this.apiKey.trim() === '') { + console.error('[ChatHuggingFace] API key validation failed: Empty or undefined') + throw new Error('HuggingFace API key is required. Please configure it in the credential settings.') + } + + const { InferenceClient } = await HuggingFaceInference.imports() + // Use InferenceClient for chat models (works better with Inference Providers) + const client = new InferenceClient(this.apiKey) + + // Don't override endpoint if model uses a provider (contains ':') or if endpoint is router-based + // When using Inference Providers, endpoint should be left blank - InferenceClient handles routing automatically + if ( + this.endpointUrl && + !this.model.includes(':') && + !this.endpointUrl.includes('/v1/chat/completions') && + !this.endpointUrl.includes('router.huggingface.co') + ) { + return client.endpoint(this.endpointUrl) + } + + // Return client without endpoint override - InferenceClient will use Inference Providers automatically + return client } /** @ignore */ static async imports(): Promise<{ - HfInference: typeof import('@huggingface/inference').HfInference + InferenceClient: typeof import('@huggingface/inference').InferenceClient }> { try { - const { HfInference } = await import('@huggingface/inference') - return { HfInference } + const { InferenceClient } = await import('@huggingface/inference') + return { InferenceClient } } catch (e) { throw new Error('Please install huggingface as a dependency with, e.g. 
`pnpm install @huggingface/inference`') } diff --git a/packages/components/nodes/chatmodels/ChatIBMWatsonx/ChatIBMWatsonx.ts b/packages/components/nodes/chatmodels/ChatIBMWatsonx/ChatIBMWatsonx.ts index f4655ace6..00adc75fb 100644 --- a/packages/components/nodes/chatmodels/ChatIBMWatsonx/ChatIBMWatsonx.ts +++ b/packages/components/nodes/chatmodels/ChatIBMWatsonx/ChatIBMWatsonx.ts @@ -161,12 +161,13 @@ class ChatIBMWatsonx_ChatModels implements INode { watsonxAIBearerToken } - const obj: ChatWatsonxInput & WatsonxAuth = { + const obj = { ...auth, streaming: streaming ?? true, model: modelName, temperature: temperature ? parseFloat(temperature) : undefined - } + } as ChatWatsonxInput & WatsonxAuth + if (cache) obj.cache = cache if (maxTokens) obj.maxTokens = parseInt(maxTokens, 10) if (frequencyPenalty) obj.frequencyPenalty = parseInt(frequencyPenalty, 10) diff --git a/packages/components/nodes/chatmodels/ChatLitellm/ChatLitellm.ts b/packages/components/nodes/chatmodels/ChatLitellm/ChatLitellm.ts index 352f883c6..78fc40ec2 100644 --- a/packages/components/nodes/chatmodels/ChatLitellm/ChatLitellm.ts +++ b/packages/components/nodes/chatmodels/ChatLitellm/ChatLitellm.ts @@ -124,7 +124,10 @@ class ChatLitellm_ChatModels implements INode { if (topP) obj.topP = parseFloat(topP) if (timeout) obj.timeout = parseInt(timeout, 10) if (cache) obj.cache = cache - if (apiKey) obj.openAIApiKey = apiKey + if (apiKey) { + obj.openAIApiKey = apiKey + obj.apiKey = apiKey + } const model = new ChatOpenAI(obj) diff --git a/packages/components/nodes/chatmodels/ChatLocalAI/ChatLocalAI.ts b/packages/components/nodes/chatmodels/ChatLocalAI/ChatLocalAI.ts index 3ce0efdfa..4040adc8f 100644 --- a/packages/components/nodes/chatmodels/ChatLocalAI/ChatLocalAI.ts +++ b/packages/components/nodes/chatmodels/ChatLocalAI/ChatLocalAI.ts @@ -111,6 +111,7 @@ class ChatLocalAI_ChatModels implements INode { temperature: parseFloat(temperature), modelName, openAIApiKey: 'sk-', + apiKey: 'sk-', streaming: 
streaming ?? true } @@ -118,7 +119,10 @@ class ChatLocalAI_ChatModels implements INode { if (topP) obj.topP = parseFloat(topP) if (timeout) obj.timeout = parseInt(timeout, 10) if (cache) obj.cache = cache - if (localAIApiKey) obj.openAIApiKey = localAIApiKey + if (localAIApiKey) { + obj.openAIApiKey = localAIApiKey + obj.apiKey = localAIApiKey + } if (basePath) obj.configuration = { baseURL: basePath } const model = new ChatOpenAI(obj) diff --git a/packages/components/nodes/chatmodels/ChatNvdiaNIM/ChatNvdiaNIM.ts b/packages/components/nodes/chatmodels/ChatNvdiaNIM/ChatNvdiaNIM.ts index b4636ad3d..59f57c485 100644 --- a/packages/components/nodes/chatmodels/ChatNvdiaNIM/ChatNvdiaNIM.ts +++ b/packages/components/nodes/chatmodels/ChatNvdiaNIM/ChatNvdiaNIM.ts @@ -17,9 +17,9 @@ class ChatNvdiaNIM_ChatModels implements INode { constructor() { this.label = 'Chat NVIDIA NIM' - this.name = 'Chat NVIDIA NIM' + this.name = 'chatNvidiaNIM' this.version = 1.1 - this.type = 'Chat NVIDIA NIM' + this.type = 'ChatNvidiaNIM' this.icon = 'nvdia.svg' this.category = 'Chat Models' this.description = 'Wrapper around NVIDIA NIM Inference API' @@ -137,6 +137,7 @@ class ChatNvdiaNIM_ChatModels implements INode { temperature: parseFloat(temperature), modelName, openAIApiKey: nvidiaNIMApiKey ?? 'sk-', + apiKey: nvidiaNIMApiKey ?? 'sk-', streaming: streaming ?? 
true } diff --git a/packages/components/nodes/chatmodels/ChatOpenAI/ChatOpenAI.ts b/packages/components/nodes/chatmodels/ChatOpenAI/ChatOpenAI.ts index 62c06d900..a7421dfda 100644 --- a/packages/components/nodes/chatmodels/ChatOpenAI/ChatOpenAI.ts +++ b/packages/components/nodes/chatmodels/ChatOpenAI/ChatOpenAI.ts @@ -1,10 +1,11 @@ -import { ChatOpenAI as LangchainChatOpenAI, ChatOpenAIFields, OpenAIClient } from '@langchain/openai' +import { ChatOpenAI as LangchainChatOpenAI, ChatOpenAIFields } from '@langchain/openai' import { BaseCache } from '@langchain/core/caches' import { ICommonObject, IMultiModalOption, INode, INodeData, INodeOptionsValue, INodeParams } from '../../../src/Interface' import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils' import { ChatOpenAI } from './FlowiseChatOpenAI' import { getModels, MODEL_TYPE } from '../../../src/modelLoader' import { HttpsProxyAgent } from 'https-proxy-agent' +import { OpenAI as OpenAIClient } from 'openai' class ChatOpenAI_ChatModels implements INode { label: string @@ -21,7 +22,7 @@ class ChatOpenAI_ChatModels implements INode { constructor() { this.label = 'ChatOpenAI' this.name = 'chatOpenAI' - this.version = 8.2 + this.version = 8.3 this.type = 'ChatOpenAI' this.icon = 'openai.svg' this.category = 'Chat Models' @@ -176,9 +177,18 @@ class ChatOpenAI_ChatModels implements INode { allowImageUploads: true } }, + { + label: 'Reasoning', + description: 'Whether the model supports reasoning. Only applicable for reasoning models.', + name: 'reasoning', + type: 'boolean', + default: false, + optional: true, + additionalParams: true + }, { label: 'Reasoning Effort', - description: 'Constrains effort on reasoning for reasoning models. 
Only applicable for o1 and o3 models.', + description: 'Constrains effort on reasoning for reasoning models', name: 'reasoningEffort', type: 'options', options: [ @@ -195,9 +205,34 @@ class ChatOpenAI_ChatModels implements INode { name: 'high' } ], - default: 'medium', - optional: false, - additionalParams: true + additionalParams: true, + show: { + reasoning: true + } + }, + { + label: 'Reasoning Summary', + description: `A summary of the reasoning performed by the model. This can be useful for debugging and understanding the model's reasoning process`, + name: 'reasoningSummary', + type: 'options', + options: [ + { + label: 'Auto', + name: 'auto' + }, + { + label: 'Concise', + name: 'concise' + }, + { + label: 'Detailed', + name: 'detailed' + } + ], + additionalParams: true, + show: { + reasoning: true + } } ] } @@ -223,7 +258,8 @@ class ChatOpenAI_ChatModels implements INode { const basePath = nodeData.inputs?.basepath as string const proxyUrl = nodeData.inputs?.proxyUrl as string const baseOptions = nodeData.inputs?.baseOptions - const reasoningEffort = nodeData.inputs?.reasoningEffort as OpenAIClient.Chat.ChatCompletionReasoningEffort + const reasoningEffort = nodeData.inputs?.reasoningEffort as OpenAIClient.ReasoningEffort | null + const reasoningSummary = nodeData.inputs?.reasoningSummary as 'auto' | 'concise' | 'detailed' | null const allowImageUploads = nodeData.inputs?.allowImageUploads as boolean const imageResolution = nodeData.inputs?.imageResolution as string @@ -240,15 +276,10 @@ class ChatOpenAI_ChatModels implements INode { temperature: parseFloat(temperature), modelName, openAIApiKey, + apiKey: openAIApiKey, streaming: streaming ?? 
true } - if (modelName.includes('o3') || modelName.includes('o1')) { - delete obj.temperature - } - if ((modelName.includes('o1') || modelName.includes('o3')) && reasoningEffort) { - obj.reasoningEffort = reasoningEffort - } if (maxTokens) obj.maxTokens = parseInt(maxTokens, 10) if (topP) obj.topP = parseFloat(topP) if (frequencyPenalty) obj.frequencyPenalty = parseFloat(frequencyPenalty) @@ -261,6 +292,19 @@ class ChatOpenAI_ChatModels implements INode { } if (strictToolCalling) obj.supportsStrictToolCalling = strictToolCalling + if (modelName.includes('o1') || modelName.includes('o3') || modelName.includes('gpt-5')) { + delete obj.temperature + delete obj.stop + const reasoning: OpenAIClient.Reasoning = {} + if (reasoningEffort) { + reasoning.effort = reasoningEffort + } + if (reasoningSummary) { + reasoning.summary = reasoningSummary + } + obj.reasoning = reasoning + } + let parsedBaseOptions: any | undefined = undefined if (baseOptions) { diff --git a/packages/components/nodes/chatmodels/ChatOpenAI/FlowiseChatOpenAI.ts b/packages/components/nodes/chatmodels/ChatOpenAI/FlowiseChatOpenAI.ts index adb57f312..cce58c2ca 100644 --- a/packages/components/nodes/chatmodels/ChatOpenAI/FlowiseChatOpenAI.ts +++ b/packages/components/nodes/chatmodels/ChatOpenAI/FlowiseChatOpenAI.ts @@ -5,6 +5,7 @@ export class ChatOpenAI extends LangchainChatOpenAI implements IVisionChatModal configuredModel: string configuredMaxToken?: number multiModalOption: IMultiModalOption + builtInTools: Record[] = [] id: string constructor(id: string, fields?: ChatOpenAIFields) { @@ -15,7 +16,7 @@ export class ChatOpenAI extends LangchainChatOpenAI implements IVisionChatModal } revertToOriginalModel(): void { - this.modelName = this.configuredModel + this.model = this.configuredModel this.maxTokens = this.configuredMaxToken } @@ -26,4 +27,8 @@ export class ChatOpenAI extends LangchainChatOpenAI implements IVisionChatModal setVisionModel(): void { // pass } + + addBuiltInTools(builtInTool: Record): 
void { + this.builtInTools.push(builtInTool) + } } diff --git a/packages/components/nodes/chatmodels/ChatOpenAICustom/ChatOpenAICustom.ts b/packages/components/nodes/chatmodels/ChatOpenAICustom/ChatOpenAICustom.ts index b076b461f..9c450ba01 100644 --- a/packages/components/nodes/chatmodels/ChatOpenAICustom/ChatOpenAICustom.ts +++ b/packages/components/nodes/chatmodels/ChatOpenAICustom/ChatOpenAICustom.ts @@ -137,6 +137,7 @@ class ChatOpenAICustom_ChatModels implements INode { temperature: parseFloat(temperature), modelName, openAIApiKey, + apiKey: openAIApiKey, streaming: streaming ?? true } diff --git a/packages/components/nodes/chatmodels/ChatOpenRouter/ChatOpenRouter.ts b/packages/components/nodes/chatmodels/ChatOpenRouter/ChatOpenRouter.ts index 4ac3c4f4c..4defd19ed 100644 --- a/packages/components/nodes/chatmodels/ChatOpenRouter/ChatOpenRouter.ts +++ b/packages/components/nodes/chatmodels/ChatOpenRouter/ChatOpenRouter.ts @@ -1,7 +1,8 @@ -import { ChatOpenAI, ChatOpenAIFields } from '@langchain/openai' +import { ChatOpenAI as LangchainChatOpenAI, ChatOpenAIFields } from '@langchain/openai' import { BaseCache } from '@langchain/core/caches' -import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface' +import { ICommonObject, IMultiModalOption, INode, INodeData, INodeParams } from '../../../src/Interface' import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils' +import { ChatOpenRouter } from './FlowiseChatOpenRouter' class ChatOpenRouter_ChatModels implements INode { label: string @@ -23,7 +24,7 @@ class ChatOpenRouter_ChatModels implements INode { this.icon = 'openRouter.svg' this.category = 'Chat Models' this.description = 'Wrapper around Open Router Inference API' - this.baseClasses = [this.type, ...getBaseClasses(ChatOpenAI)] + this.baseClasses = [this.type, ...getBaseClasses(LangchainChatOpenAI)] this.credential = { label: 'Connect Credential', name: 'credential', @@ -114,6 +115,40 @@ class 
ChatOpenRouter_ChatModels implements INode { type: 'json', optional: true, additionalParams: true + }, + { + label: 'Allow Image Uploads', + name: 'allowImageUploads', + type: 'boolean', + description: + 'Allow image input. Refer to the docs for more details.', + default: false, + optional: true + }, + { + label: 'Image Resolution', + description: 'This parameter controls the resolution in which the model views the image.', + name: 'imageResolution', + type: 'options', + options: [ + { + label: 'Low', + name: 'low' + }, + { + label: 'High', + name: 'high' + }, + { + label: 'Auto', + name: 'auto' + } + ], + default: 'low', + optional: false, + show: { + allowImageUploads: true + } } ] } @@ -130,6 +165,8 @@ class ChatOpenRouter_ChatModels implements INode { const basePath = (nodeData.inputs?.basepath as string) || 'https://openrouter.ai/api/v1' const baseOptions = nodeData.inputs?.baseOptions const cache = nodeData.inputs?.cache as BaseCache + const allowImageUploads = nodeData.inputs?.allowImageUploads as boolean + const imageResolution = nodeData.inputs?.imageResolution as string const credentialData = await getCredentialData(nodeData.credential ?? '', options) const openRouterApiKey = getCredentialParam('openRouterApiKey', credentialData, nodeData) @@ -138,6 +175,7 @@ class ChatOpenRouter_ChatModels implements INode { temperature: parseFloat(temperature), modelName, openAIApiKey: openRouterApiKey, + apiKey: openRouterApiKey, streaming: streaming ?? true } @@ -154,7 +192,7 @@ class ChatOpenRouter_ChatModels implements INode { try { parsedBaseOptions = typeof baseOptions === 'object' ? 
baseOptions : JSON.parse(baseOptions) } catch (exception) { - throw new Error("Invalid JSON in the ChatCerebras's BaseOptions: " + exception) + throw new Error("Invalid JSON in the ChatOpenRouter's BaseOptions: " + exception) } } @@ -165,7 +203,15 @@ class ChatOpenRouter_ChatModels implements INode { } } - const model = new ChatOpenAI(obj) + const multiModalOption: IMultiModalOption = { + image: { + allowImageUploads: allowImageUploads ?? false, + imageResolution + } + } + + const model = new ChatOpenRouter(nodeData.id, obj) + model.setMultiModalOption(multiModalOption) return model } } diff --git a/packages/components/nodes/chatmodels/ChatOpenRouter/FlowiseChatOpenRouter.ts b/packages/components/nodes/chatmodels/ChatOpenRouter/FlowiseChatOpenRouter.ts new file mode 100644 index 000000000..bca0c5d16 --- /dev/null +++ b/packages/components/nodes/chatmodels/ChatOpenRouter/FlowiseChatOpenRouter.ts @@ -0,0 +1,29 @@ +import { ChatOpenAI as LangchainChatOpenAI, ChatOpenAIFields } from '@langchain/openai' +import { IMultiModalOption, IVisionChatModal } from '../../../src' + +export class ChatOpenRouter extends LangchainChatOpenAI implements IVisionChatModal { + configuredModel: string + configuredMaxToken?: number + multiModalOption: IMultiModalOption + id: string + + constructor(id: string, fields?: ChatOpenAIFields) { + super(fields) + this.id = id + this.configuredModel = fields?.modelName ?? 
'' + this.configuredMaxToken = fields?.maxTokens + } + + revertToOriginalModel(): void { + this.model = this.configuredModel + this.maxTokens = this.configuredMaxToken + } + + setMultiModalOption(multiModalOption: IMultiModalOption): void { + this.multiModalOption = multiModalOption + } + + setVisionModel(): void { + // pass - OpenRouter models don't need model switching + } +} diff --git a/packages/components/nodes/chatmodels/ChatSambanova/ChatSambanova.ts b/packages/components/nodes/chatmodels/ChatSambanova/ChatSambanova.ts new file mode 100644 index 000000000..a62ebfb30 --- /dev/null +++ b/packages/components/nodes/chatmodels/ChatSambanova/ChatSambanova.ts @@ -0,0 +1,123 @@ +import { BaseCache } from '@langchain/core/caches' +import { ChatOpenAI, ChatOpenAIFields } from '@langchain/openai' +import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface' +import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils' + +class ChatSambanova_ChatModels implements INode { + label: string + name: string + version: number + type: string + icon: string + category: string + description: string + baseClasses: string[] + credential: INodeParams + inputs: INodeParams[] + + constructor() { + this.label = 'ChatSambanova' + this.name = 'chatSambanova' + this.version = 1.0 + this.type = 'ChatSambanova' + this.icon = 'sambanova.png' + this.category = 'Chat Models' + this.description = 'Wrapper around Sambanova Chat Endpoints' + this.baseClasses = [this.type, ...getBaseClasses(ChatOpenAI)] + this.credential = { + label: 'Connect Credential', + name: 'credential', + type: 'credential', + credentialNames: ['sambanovaApi'] + } + this.inputs = [ + { + label: 'Cache', + name: 'cache', + type: 'BaseCache', + optional: true + }, + { + label: 'Model', + name: 'modelName', + type: 'string', + default: 'Meta-Llama-3.3-70B-Instruct', + placeholder: 'Meta-Llama-3.3-70B-Instruct' + }, + { + label: 'Temperature', + name: 'temperature', + type: 
'number', + step: 0.1, + default: 0.9, + optional: true + }, + { + label: 'Streaming', + name: 'streaming', + type: 'boolean', + default: true, + optional: true + }, + { + label: 'BasePath', + name: 'basepath', + type: 'string', + optional: true, + default: 'https://api.sambanova.ai/v1', + additionalParams: true + }, + { + label: 'BaseOptions', + name: 'baseOptions', + type: 'json', + optional: true, + additionalParams: true + } + ] + } + + async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { + const cache = nodeData.inputs?.cache as BaseCache + const temperature = nodeData.inputs?.temperature as string + const modelName = nodeData.inputs?.modelName as string + const streaming = nodeData.inputs?.streaming as boolean + const basePath = nodeData.inputs?.basepath as string + const baseOptions = nodeData.inputs?.baseOptions + + const credentialData = await getCredentialData(nodeData.credential ?? '', options) + const sambanovaApiKey = getCredentialParam('sambanovaApiKey', credentialData, nodeData) + + const obj: ChatOpenAIFields = { + temperature: temperature ? parseFloat(temperature) : undefined, + model: modelName, + apiKey: sambanovaApiKey, + openAIApiKey: sambanovaApiKey, + streaming: streaming ?? true + } + + if (cache) obj.cache = cache + + let parsedBaseOptions: any | undefined = undefined + + if (baseOptions) { + try { + parsedBaseOptions = typeof baseOptions === 'object' ? 
baseOptions : JSON.parse(baseOptions) + } catch (exception) { + throw new Error("Invalid JSON in the ChatSambanova's BaseOptions: " + exception) + } + } + + if (basePath || parsedBaseOptions) { + obj.configuration = { + baseURL: basePath, + defaultHeaders: parsedBaseOptions + } + } + + const model = new ChatOpenAI(obj) + return model + } +} + +module.exports = { nodeClass: ChatSambanova_ChatModels } diff --git a/packages/components/nodes/chatmodels/ChatSambanova/sambanova.png b/packages/components/nodes/chatmodels/ChatSambanova/sambanova.png new file mode 100644 index 000000000..8bc16c5d5 Binary files /dev/null and b/packages/components/nodes/chatmodels/ChatSambanova/sambanova.png differ diff --git a/packages/components/nodes/chatmodels/ChatXAI/ChatXAI.ts b/packages/components/nodes/chatmodels/ChatXAI/ChatXAI.ts index a6f41e884..2495522ce 100644 --- a/packages/components/nodes/chatmodels/ChatXAI/ChatXAI.ts +++ b/packages/components/nodes/chatmodels/ChatXAI/ChatXAI.ts @@ -1,7 +1,8 @@ import { BaseCache } from '@langchain/core/caches' -import { ChatXAI, ChatXAIInput } from '@langchain/xai' -import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface' +import { ChatXAIInput } from '@langchain/xai' +import { ICommonObject, IMultiModalOption, INode, INodeData, INodeParams } from '../../../src/Interface' import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils' +import { ChatXAI } from './FlowiseChatXAI' class ChatXAI_ChatModels implements INode { label: string @@ -18,7 +19,7 @@ class ChatXAI_ChatModels implements INode { constructor() { this.label = 'ChatXAI' this.name = 'chatXAI' - this.version = 1.0 + this.version = 2.0 this.type = 'ChatXAI' this.icon = 'xai.png' this.category = 'Chat Models' @@ -74,6 +75,15 @@ class ChatXAI_ChatModels implements INode { step: 1, optional: true, additionalParams: true + }, + { + label: 'Allow Image Uploads', + name: 'allowImageUploads', + type: 'boolean', + description: + 
'Allow image input. Refer to the docs for more details.', + default: false, + optional: true } ] } @@ -84,6 +94,7 @@ class ChatXAI_ChatModels implements INode { const modelName = nodeData.inputs?.modelName as string const maxTokens = nodeData.inputs?.maxTokens as string const streaming = nodeData.inputs?.streaming as boolean + const allowImageUploads = nodeData.inputs?.allowImageUploads as boolean const credentialData = await getCredentialData(nodeData.credential ?? '', options) const xaiApiKey = getCredentialParam('xaiApiKey', credentialData, nodeData) @@ -97,7 +108,15 @@ class ChatXAI_ChatModels implements INode { if (cache) obj.cache = cache if (maxTokens) obj.maxTokens = parseInt(maxTokens, 10) - const model = new ChatXAI(obj) + const multiModalOption: IMultiModalOption = { + image: { + allowImageUploads: allowImageUploads ?? false + } + } + + const model = new ChatXAI(nodeData.id, obj) + model.setMultiModalOption(multiModalOption) + return model } } diff --git a/packages/components/nodes/chatmodels/ChatXAI/FlowiseChatXAI.ts b/packages/components/nodes/chatmodels/ChatXAI/FlowiseChatXAI.ts new file mode 100644 index 000000000..e315a29a9 --- /dev/null +++ b/packages/components/nodes/chatmodels/ChatXAI/FlowiseChatXAI.ts @@ -0,0 +1,29 @@ +import { ChatXAI as LCChatXAI, ChatXAIInput } from '@langchain/xai' +import { IMultiModalOption, IVisionChatModal } from '../../../src' + +export class ChatXAI extends LCChatXAI implements IVisionChatModal { + configuredModel: string + configuredMaxToken?: number + multiModalOption: IMultiModalOption + id: string + + constructor(id: string, fields?: ChatXAIInput) { + super(fields) + this.id = id + this.configuredModel = fields?.model ?? 
'' + this.configuredMaxToken = fields?.maxTokens + } + + revertToOriginalModel(): void { + this.modelName = this.configuredModel + this.maxTokens = this.configuredMaxToken + } + + setMultiModalOption(multiModalOption: IMultiModalOption): void { + this.multiModalOption = multiModalOption + } + + setVisionModel(): void { + // pass + } +} diff --git a/packages/components/nodes/chatmodels/Deepseek/Deepseek.ts b/packages/components/nodes/chatmodels/Deepseek/Deepseek.ts index 5f5e95563..fa92a5633 100644 --- a/packages/components/nodes/chatmodels/Deepseek/Deepseek.ts +++ b/packages/components/nodes/chatmodels/Deepseek/Deepseek.ts @@ -153,6 +153,7 @@ class Deepseek_ChatModels implements INode { temperature: parseFloat(temperature), modelName, openAIApiKey, + apiKey: openAIApiKey, streaming: streaming ?? true } diff --git a/packages/components/nodes/documentloaders/API/APILoader.ts b/packages/components/nodes/documentloaders/API/APILoader.ts index 02b77f789..479ad2e94 100644 --- a/packages/components/nodes/documentloaders/API/APILoader.ts +++ b/packages/components/nodes/documentloaders/API/APILoader.ts @@ -1,8 +1,10 @@ -import axios, { AxiosRequestConfig } from 'axios' -import { omit } from 'lodash' import { Document } from '@langchain/core/documents' -import { TextSplitter } from 'langchain/text_splitter' +import axios, { AxiosRequestConfig } from 'axios' +import * as https from 'https' import { BaseDocumentLoader } from 'langchain/document_loaders/base' +import { TextSplitter } from 'langchain/text_splitter' +import { omit } from 'lodash' +import { getFileFromStorage } from '../../../src' import { ICommonObject, IDocument, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface' import { handleEscapeCharacters } from '../../../src/utils' @@ -21,7 +23,7 @@ class API_DocumentLoaders implements INode { constructor() { this.label = 'API Loader' this.name = 'apiLoader' - this.version = 2.0 + this.version = 2.1 this.type = 'Document' this.icon = 
'api.svg' this.category = 'Document Loaders' @@ -61,6 +63,15 @@ class API_DocumentLoaders implements INode { additionalParams: true, optional: true }, + { + label: 'SSL Certificate', + description: 'Please upload a SSL certificate file in either .pem or .crt', + name: 'caFile', + type: 'file', + fileType: '.pem, .crt', + additionalParams: true, + optional: true + }, { label: 'Body', name: 'body', @@ -84,7 +95,7 @@ class API_DocumentLoaders implements INode { type: 'string', rows: 4, description: - 'Each document loader comes with a default set of metadata keys that are extracted from the document. You can use this field to omit some of the default metadata keys. The value should be a list of keys, seperated by comma. Use * to omit all metadata keys execept the ones you specify in the Additional Metadata field', + 'Each document loader comes with a default set of metadata keys that are extracted from the document. You can use this field to omit some of the default metadata keys. The value should be a list of keys, separated by comma. Use * to omit all metadata keys except the ones you specify in the Additional Metadata field', placeholder: 'key1, key2, key3.nestedKey1', optional: true, additionalParams: true @@ -105,8 +116,10 @@ class API_DocumentLoaders implements INode { } ] } - async init(nodeData: INodeData): Promise { + + async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { const headers = nodeData.inputs?.headers as string + const caFileBase64 = nodeData.inputs?.caFile as string const url = nodeData.inputs?.url as string const body = nodeData.inputs?.body as string const method = nodeData.inputs?.method as string @@ -120,22 +133,37 @@ class API_DocumentLoaders implements INode { omitMetadataKeys = _omitMetadataKeys.split(',').map((key) => key.trim()) } - const options: ApiLoaderParams = { + const apiLoaderParam: ApiLoaderParams = { url, method } if (headers) { const parsedHeaders = typeof headers === 'object' ? 
headers : JSON.parse(headers) - options.headers = parsedHeaders + apiLoaderParam.headers = parsedHeaders + } + + if (caFileBase64.startsWith('FILE-STORAGE::')) { + let file = caFileBase64.replace('FILE-STORAGE::', '') + file = file.replace('[', '') + file = file.replace(']', '') + const orgId = options.orgId + const chatflowid = options.chatflowid + const fileData = await getFileFromStorage(file, orgId, chatflowid) + apiLoaderParam.ca = fileData.toString() + } else { + const splitDataURI = caFileBase64.split(',') + splitDataURI.pop() + const bf = Buffer.from(splitDataURI.pop() || '', 'base64') + apiLoaderParam.ca = bf.toString('utf-8') } if (body) { const parsedBody = typeof body === 'object' ? body : JSON.parse(body) - options.body = parsedBody + apiLoaderParam.body = parsedBody } - const loader = new ApiLoader(options) + const loader = new ApiLoader(apiLoaderParam) let docs: IDocument[] = [] @@ -195,6 +223,7 @@ interface ApiLoaderParams { method: string headers?: ICommonObject body?: ICommonObject + ca?: string } class ApiLoader extends BaseDocumentLoader { @@ -206,28 +235,36 @@ class ApiLoader extends BaseDocumentLoader { public readonly method: string - constructor({ url, headers, body, method }: ApiLoaderParams) { + public readonly ca?: string + + constructor({ url, headers, body, method, ca }: ApiLoaderParams) { super() this.url = url this.headers = headers this.body = body this.method = method + this.ca = ca } public async load(): Promise { if (this.method === 'POST') { - return this.executePostRequest(this.url, this.headers, this.body) + return this.executePostRequest(this.url, this.headers, this.body, this.ca) } else { - return this.executeGetRequest(this.url, this.headers) + return this.executeGetRequest(this.url, this.headers, this.ca) } } - protected async executeGetRequest(url: string, headers?: ICommonObject): Promise { + protected async executeGetRequest(url: string, headers?: ICommonObject, ca?: string): Promise { try { const config: 
AxiosRequestConfig = {} if (headers) { config.headers = headers } + if (ca) { + config.httpsAgent = new https.Agent({ + ca: ca + }) + } const response = await axios.get(url, config) const responseJsonString = JSON.stringify(response.data, null, 2) const doc = new Document({ @@ -242,12 +279,17 @@ class ApiLoader extends BaseDocumentLoader { } } - protected async executePostRequest(url: string, headers?: ICommonObject, body?: ICommonObject): Promise { + protected async executePostRequest(url: string, headers?: ICommonObject, body?: ICommonObject, ca?: string): Promise { try { const config: AxiosRequestConfig = {} if (headers) { config.headers = headers } + if (ca) { + config.httpsAgent = new https.Agent({ + ca: ca + }) + } const response = await axios.post(url, body ?? {}, config) const responseJsonString = JSON.stringify(response.data, null, 2) const doc = new Document({ diff --git a/packages/components/nodes/documentloaders/ApifyWebsiteContentCrawler/apify-symbol-transparent.svg b/packages/components/nodes/documentloaders/ApifyWebsiteContentCrawler/apify-symbol-transparent.svg index 457caaaaa..c8894c844 100644 --- a/packages/components/nodes/documentloaders/ApifyWebsiteContentCrawler/apify-symbol-transparent.svg +++ b/packages/components/nodes/documentloaders/ApifyWebsiteContentCrawler/apify-symbol-transparent.svg @@ -1,5 +1,12 @@ - - - - + + + + + + + + + + + diff --git a/packages/components/nodes/documentloaders/Cheerio/Cheerio.ts b/packages/components/nodes/documentloaders/Cheerio/Cheerio.ts index bf5bf3387..9ef16850c 100644 --- a/packages/components/nodes/documentloaders/Cheerio/Cheerio.ts +++ b/packages/components/nodes/documentloaders/Cheerio/Cheerio.ts @@ -123,6 +123,7 @@ class Cheerio_DocumentLoaders implements INode { const selectedLinks = nodeData.inputs?.selectedLinks as string[] let limit = parseInt(nodeData.inputs?.limit as string) const output = nodeData.outputs?.output as string + const orgId = options.orgId const _omitMetadataKeys = 
nodeData.inputs?.omitMetadataKeys as string @@ -149,7 +150,8 @@ class Cheerio_DocumentLoaders implements INode { try { let docs: IDocument[] = [] if (url.endsWith('.pdf')) { - if (process.env.DEBUG === 'true') options.logger.info(`CheerioWebBaseLoader does not support PDF files: ${url}`) + if (process.env.DEBUG === 'true') + options.logger.info(`[${orgId}]: CheerioWebBaseLoader does not support PDF files: ${url}`) return docs } const loader = new CheerioWebBaseLoader(url, params) @@ -161,7 +163,8 @@ class Cheerio_DocumentLoaders implements INode { } return docs } catch (err) { - if (process.env.DEBUG === 'true') options.logger.error(`error in CheerioWebBaseLoader: ${err.message}, on page: ${url}`) + if (process.env.DEBUG === 'true') + options.logger.error(`[${orgId}]: Error in CheerioWebBaseLoader: ${err.message}, on page: ${url}`) return [] } } @@ -169,7 +172,7 @@ class Cheerio_DocumentLoaders implements INode { let docs: IDocument[] = [] if (relativeLinksMethod) { - if (process.env.DEBUG === 'true') options.logger.info(`Start ${relativeLinksMethod}`) + if (process.env.DEBUG === 'true') options.logger.info(`[${orgId}]: Start CheerioWebBaseLoader ${relativeLinksMethod}`) // if limit is 0 we don't want it to default to 10 so we check explicitly for null or undefined // so when limit is 0 we can fetch all the links if (limit === null || limit === undefined) limit = 10 @@ -180,15 +183,18 @@ class Cheerio_DocumentLoaders implements INode { : relativeLinksMethod === 'webCrawl' ? 
await webCrawl(url, limit) : await xmlScrape(url, limit) - if (process.env.DEBUG === 'true') options.logger.info(`pages: ${JSON.stringify(pages)}, length: ${pages.length}`) + if (process.env.DEBUG === 'true') + options.logger.info(`[${orgId}]: CheerioWebBaseLoader pages: ${JSON.stringify(pages)}, length: ${pages.length}`) if (!pages || pages.length === 0) throw new Error('No relative links found') for (const page of pages) { docs.push(...(await cheerioLoader(page))) } - if (process.env.DEBUG === 'true') options.logger.info(`Finish ${relativeLinksMethod}`) + if (process.env.DEBUG === 'true') options.logger.info(`[${orgId}]: Finish CheerioWebBaseLoader ${relativeLinksMethod}`) } else if (selectedLinks && selectedLinks.length > 0) { if (process.env.DEBUG === 'true') - options.logger.info(`pages: ${JSON.stringify(selectedLinks)}, length: ${selectedLinks.length}`) + options.logger.info( + `[${orgId}]: CheerioWebBaseLoader pages: ${JSON.stringify(selectedLinks)}, length: ${selectedLinks.length}` + ) for (const page of selectedLinks.slice(0, limit)) { docs.push(...(await cheerioLoader(page))) } diff --git a/packages/components/nodes/documentloaders/Csv/Csv.ts b/packages/components/nodes/documentloaders/Csv/Csv.ts index 10186cf23..9e251a3e8 100644 --- a/packages/components/nodes/documentloaders/Csv/Csv.ts +++ b/packages/components/nodes/documentloaders/Csv/Csv.ts @@ -107,9 +107,9 @@ class Csv_DocumentLoaders implements INode { return { files, fromStorage } } - async getFileData(file: string, { chatflowid }: { chatflowid: string }, fromStorage?: boolean) { + async getFileData(file: string, { orgId, chatflowid }: { orgId: string; chatflowid: string }, fromStorage?: boolean) { if (fromStorage) { - return getFileFromStorage(file, chatflowid) + return getFileFromStorage(file, orgId, chatflowid) } else { const splitDataURI = file.split(',') splitDataURI.pop() @@ -126,6 +126,7 @@ class Csv_DocumentLoaders implements INode { let docs: IDocument[] = [] + const orgId = options.orgId 
const chatflowid = options.chatflowid const { files, fromStorage } = this.getFiles(nodeData) @@ -133,7 +134,7 @@ class Csv_DocumentLoaders implements INode { for (const file of files) { if (!file) continue - const fileData = await this.getFileData(file, { chatflowid }, fromStorage) + const fileData = await this.getFileData(file, { orgId, chatflowid }, fromStorage) const blob = new Blob([fileData]) const loader = new CSVLoader(blob, columnName.trim().length === 0 ? undefined : columnName.trim()) diff --git a/packages/components/nodes/documentloaders/Csv/CsvLoader.ts b/packages/components/nodes/documentloaders/Csv/CsvLoader.ts index 35d4a1fce..df0d19cff 100644 --- a/packages/components/nodes/documentloaders/Csv/CsvLoader.ts +++ b/packages/components/nodes/documentloaders/Csv/CsvLoader.ts @@ -2,7 +2,7 @@ import { TextLoader } from 'langchain/document_loaders/fs/text' import Papa from 'papaparse' type CSVLoaderOptions = { - // Return specifific column from key (string) or index (integer) + // Return specific column from key (string) or index (integer) column?: string | number // Force separator (default: auto detect) separator?: string diff --git a/packages/components/nodes/documentloaders/CustomDocumentLoader/CustomDocumentLoader.ts b/packages/components/nodes/documentloaders/CustomDocumentLoader/CustomDocumentLoader.ts index ed19a01e7..634a660cf 100644 --- a/packages/components/nodes/documentloaders/CustomDocumentLoader/CustomDocumentLoader.ts +++ b/packages/components/nodes/documentloaders/CustomDocumentLoader/CustomDocumentLoader.ts @@ -1,7 +1,6 @@ import { ICommonObject, IDatabaseEntity, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface' -import { NodeVM } from '@flowiseai/nodevm' import { DataSource } from 'typeorm' -import { availableDependencies, defaultAllowBuiltInDep, getVars, handleEscapeCharacters, prepareSandboxVars } from '../../../src/utils' +import { getVars, handleEscapeCharacters, executeJavaScriptCode, 
createCodeExecutionSandbox } from '../../../src/utils' class CustomDocumentLoader_DocumentLoaders implements INode { label: string @@ -72,7 +71,7 @@ class CustomDocumentLoader_DocumentLoaders implements INode { const appDataSource = options.appDataSource as DataSource const databaseEntities = options.databaseEntities as IDatabaseEntity - const variables = await getVars(appDataSource, databaseEntities, nodeData) + const variables = await getVars(appDataSource, databaseEntities, nodeData, options) const flow = { chatflowId: options.chatflowid, sessionId: options.sessionId, @@ -106,44 +105,22 @@ class CustomDocumentLoader_DocumentLoaders implements INode { } } - let sandbox: any = { - $input: input, - util: undefined, - Symbol: undefined, - child_process: undefined, - fs: undefined, - process: undefined - } - sandbox['$vars'] = prepareSandboxVars(variables) - sandbox['$flow'] = flow + // Create additional sandbox variables + const additionalSandbox: ICommonObject = {} + // Add input variables to sandbox if (Object.keys(inputVars).length) { for (const item in inputVars) { - sandbox[`$${item}`] = inputVars[item] + additionalSandbox[`$${item}`] = inputVars[item] } } - const builtinDeps = process.env.TOOL_FUNCTION_BUILTIN_DEP - ? defaultAllowBuiltInDep.concat(process.env.TOOL_FUNCTION_BUILTIN_DEP.split(',')) - : defaultAllowBuiltInDep - const externalDeps = process.env.TOOL_FUNCTION_EXTERNAL_DEP ? 
process.env.TOOL_FUNCTION_EXTERNAL_DEP.split(',') : [] - const deps = availableDependencies.concat(externalDeps) + const sandbox = createCodeExecutionSandbox(input, variables, flow, additionalSandbox) - const nodeVMOptions = { - console: 'inherit', - sandbox, - require: { - external: { modules: deps }, - builtin: builtinDeps - }, - eval: false, - wasm: false, - timeout: 10000 - } as any - - const vm = new NodeVM(nodeVMOptions) try { - const response = await vm.run(`module.exports = async function() {${javascriptFunction}}()`, __dirname) + const response = await executeJavaScriptCode(javascriptFunction, sandbox, { + libraries: ['axios'] + }) if (output === 'document' && Array.isArray(response)) { if (response.length === 0) return response diff --git a/packages/components/nodes/documentloaders/DocumentStore/DocStoreLoader.ts b/packages/components/nodes/documentloaders/DocumentStore/DocStoreLoader.ts index be66e90ad..e39b07ca1 100644 --- a/packages/components/nodes/documentloaders/DocumentStore/DocStoreLoader.ts +++ b/packages/components/nodes/documentloaders/DocumentStore/DocStoreLoader.ts @@ -60,7 +60,8 @@ class DocStore_DocumentLoaders implements INode { return returnData } - const stores = await appDataSource.getRepository(databaseEntities['DocumentStore']).find() + const searchOptions = options.searchOptions || {} + const stores = await appDataSource.getRepository(databaseEntities['DocumentStore']).findBy(searchOptions) for (const store of stores) { if (store.status === 'SYNC') { const obj = { diff --git a/packages/components/nodes/documentloaders/Docx/Docx.ts b/packages/components/nodes/documentloaders/Docx/Docx.ts index 70ab2c5c7..d59a1e2b3 100644 --- a/packages/components/nodes/documentloaders/Docx/Docx.ts +++ b/packages/components/nodes/documentloaders/Docx/Docx.ts @@ -96,11 +96,12 @@ class Docx_DocumentLoaders implements INode { } else { files = [fileName] } + const orgId = options.orgId const chatflowid = options.chatflowid for (const file of files) { if 
(!file) continue - const fileData = await getFileFromStorage(file, chatflowid) + const fileData = await getFileFromStorage(file, orgId, chatflowid) const blob = new Blob([fileData]) const loader = new DocxLoader(blob) diff --git a/packages/components/nodes/documentloaders/Epub/Epub.ts b/packages/components/nodes/documentloaders/Epub/Epub.ts index 088d01a4e..440cfe009 100644 --- a/packages/components/nodes/documentloaders/Epub/Epub.ts +++ b/packages/components/nodes/documentloaders/Epub/Epub.ts @@ -118,10 +118,11 @@ class Epub_DocumentLoaders implements INode { files = fileName.startsWith('[') && fileName.endsWith(']') ? JSON.parse(fileName) : [fileName] const chatflowid = options.chatflowid + const orgId = options.orgId for (const file of files) { if (!file) continue - const fileData = await getFileFromStorage(file, chatflowid) + const fileData = await getFileFromStorage(file, orgId, chatflowid) const tempFilePath = path.join(tempDir, `${Date.now()}_${file}`) fs.writeFileSync(tempFilePath, fileData) await this.extractDocs(usage, tempFilePath, textSplitter, docs) diff --git a/packages/components/nodes/documentloaders/File/File.ts b/packages/components/nodes/documentloaders/File/File.ts index 68ec54df0..802163235 100644 --- a/packages/components/nodes/documentloaders/File/File.ts +++ b/packages/components/nodes/documentloaders/File/File.ts @@ -7,6 +7,8 @@ import { CSVLoader } from '@langchain/community/document_loaders/fs/csv' import { PDFLoader } from '@langchain/community/document_loaders/fs/pdf' import { DocxLoader } from '@langchain/community/document_loaders/fs/docx' import { BaseDocumentLoader } from 'langchain/document_loaders/base' +import { LoadOfSheet } from '../MicrosoftExcel/ExcelLoader' +import { PowerpointLoader } from '../MicrosoftPowerpoint/PowerpointLoader' import { Document } from '@langchain/core/documents' import { getFileFromStorage } from '../../../src/storageUtils' import { handleEscapeCharacters, mapMimeTypeToExt } from '../../../src/utils' @@ 
-30,7 +32,7 @@ class File_DocumentLoaders implements INode { this.type = 'Document' this.icon = 'file.svg' this.category = 'Document Loaders' - this.description = `A generic file loader that can load txt, json, csv, docx, pdf, and other files` + this.description = `A generic file loader that can load different file types` this.baseClasses = [this.type] this.inputs = [ { @@ -134,9 +136,10 @@ class File_DocumentLoaders implements INode { let files: string[] = [] const fileBlobs: { blob: Blob; ext: string }[] = [] + const processRaw = options.processRaw //FILE-STORAGE::["CONTRIBUTING.md","LICENSE.md","README.md"] - const totalFiles = getOverrideFileInputs(nodeData) || fileBase64 + const totalFiles = getOverrideFileInputs(nodeData, processRaw) || fileBase64 if (totalFiles.startsWith('FILE-STORAGE::')) { const fileName = totalFiles.replace('FILE-STORAGE::', '') if (fileName.startsWith('[') && fileName.endsWith(']')) { @@ -144,6 +147,7 @@ class File_DocumentLoaders implements INode { } else { files = [fileName] } + const orgId = options.orgId const chatflowid = options.chatflowid // specific to createAttachment to get files from chatId @@ -151,14 +155,14 @@ class File_DocumentLoaders implements INode { if (retrieveAttachmentChatId) { for (const file of files) { if (!file) continue - const fileData = await getFileFromStorage(file, chatflowid, options.chatId) + const fileData = await getFileFromStorage(file, orgId, chatflowid, options.chatId) const blob = new Blob([fileData]) fileBlobs.push({ blob, ext: file.split('.').pop() || '' }) } } else { for (const file of files) { if (!file) continue - const fileData = await getFileFromStorage(file, chatflowid) + const fileData = await getFileFromStorage(file, orgId, chatflowid) const blob = new Blob([fileData]) fileBlobs.push({ blob, ext: file.split('.').pop() || '' }) } @@ -211,11 +215,20 @@ class File_DocumentLoaders implements INode { json: (blob) => new JSONLoader(blob), jsonl: (blob) => new JSONLinesLoader(blob, '/' + 
pointerName.trim()), txt: (blob) => new TextLoader(blob), + html: (blob) => new TextLoader(blob), + css: (blob) => new TextLoader(blob), + js: (blob) => new TextLoader(blob), + xml: (blob) => new TextLoader(blob), + md: (blob) => new TextLoader(blob), csv: (blob) => new CSVLoader(blob), - xls: (blob) => new CSVLoader(blob), - xlsx: (blob) => new CSVLoader(blob), + xls: (blob) => new LoadOfSheet(blob), + xlsx: (blob) => new LoadOfSheet(blob), + xlsm: (blob) => new LoadOfSheet(blob), + xlsb: (blob) => new LoadOfSheet(blob), docx: (blob) => new DocxLoader(blob), doc: (blob) => new DocxLoader(blob), + ppt: (blob) => new PowerpointLoader(blob), + pptx: (blob) => new PowerpointLoader(blob), pdf: (blob) => pdfUsage === 'perFile' ? // @ts-ignore @@ -286,7 +299,7 @@ class File_DocumentLoaders implements INode { } } -const getOverrideFileInputs = (nodeData: INodeData) => { +const getOverrideFileInputs = (nodeData: INodeData, processRaw: boolean) => { const txtFileBase64 = nodeData.inputs?.txtFile as string const pdfFileBase64 = nodeData.inputs?.pdfFile as string const jsonFileBase64 = nodeData.inputs?.jsonFile as string @@ -294,6 +307,8 @@ const getOverrideFileInputs = (nodeData: INodeData) => { const jsonlinesFileBase64 = nodeData.inputs?.jsonlinesFile as string const docxFileBase64 = nodeData.inputs?.docxFile as string const yamlFileBase64 = nodeData.inputs?.yamlFile as string + const excelFileBase64 = nodeData.inputs?.excelFile as string + const powerpointFileBase64 = nodeData.inputs?.powerpointFile as string const removePrefix = (storageFile: string): string[] => { const fileName = storageFile.replace('FILE-STORAGE::', '') @@ -326,6 +341,16 @@ const getOverrideFileInputs = (nodeData: INodeData) => { if (yamlFileBase64) { files.push(...removePrefix(yamlFileBase64)) } + if (excelFileBase64) { + files.push(...removePrefix(excelFileBase64)) + } + if (powerpointFileBase64) { + files.push(...removePrefix(powerpointFileBase64)) + } + + if (processRaw) { + return files.length ? 
JSON.stringify(files) : '' + } return files.length ? `FILE-STORAGE::${JSON.stringify(files)}` : '' } diff --git a/packages/components/nodes/documentloaders/FireCrawl/FireCrawl.ts b/packages/components/nodes/documentloaders/FireCrawl/FireCrawl.ts index a2707d13b..27bc3c5b6 100644 --- a/packages/components/nodes/documentloaders/FireCrawl/FireCrawl.ts +++ b/packages/components/nodes/documentloaders/FireCrawl/FireCrawl.ts @@ -5,7 +5,6 @@ import { INode, INodeData, INodeParams, ICommonObject, INodeOutputsValue } from import { getCredentialData, getCredentialParam, handleEscapeCharacters } from '../../../src/utils' import axios, { AxiosResponse, AxiosRequestHeaders } from 'axios' import { z } from 'zod' -import { zodToJsonSchema } from 'zod-to-json-schema' // FirecrawlApp interfaces interface FirecrawlAppConfig { @@ -17,25 +16,24 @@ interface FirecrawlDocumentMetadata { title?: string description?: string language?: string - // ... (other metadata fields) + sourceURL?: string + statusCode?: number + error?: string [key: string]: any } interface FirecrawlDocument { - id?: string - url?: string - content: string markdown?: string html?: string - llm_extraction?: Record - createdAt?: Date - updatedAt?: Date - type?: string + rawHtml?: string + screenshot?: string + links?: string[] + actions?: { + screenshots?: string[] + } metadata: FirecrawlDocumentMetadata - childrenLinks?: string[] - provider?: string + llm_extraction?: Record warning?: string - index?: number } interface ScrapeResponse { @@ -46,9 +44,50 @@ interface ScrapeResponse { interface CrawlResponse { success: boolean - jobId?: string - data?: FirecrawlDocument[] + id: string + url: string error?: string + data?: FirecrawlDocument +} + +interface CrawlStatusResponse { + status: string + total: number + completed: number + creditsUsed: number + expiresAt: string + next?: string + data?: FirecrawlDocument[] +} + +interface ExtractResponse { + success: boolean + id: string + url: string + data?: Record +} + 
+interface SearchResult { + url: string + title: string + description: string +} + +interface SearchResponse { + success: boolean + data?: SearchResult[] + warning?: string +} + +interface SearchRequest { + query: string + limit?: number + tbs?: string + lang?: string + country?: string + location?: string + timeout?: number + ignoreInvalidURLs?: boolean } interface Params { @@ -60,6 +99,36 @@ interface Params { } } +interface ExtractRequest { + urls: string[] + prompt?: string + schema?: Record + enableWebSearch?: boolean + ignoreSitemap?: boolean + includeSubdomains?: boolean + showSources?: boolean + scrapeOptions?: { + formats?: string[] + onlyMainContent?: boolean + includeTags?: string | string[] + excludeTags?: string | string[] + mobile?: boolean + skipTlsVerification?: boolean + timeout?: number + jsonOptions?: { + schema?: Record + prompt?: string + } + } +} + +interface ExtractStatusResponse { + success: boolean + data: any + status: 'completed' | 'pending' | 'processing' | 'failed' | 'cancelled' + expiresAt: string +} + // FirecrawlApp class (not exported) class FirecrawlApp { private apiKey: string @@ -75,23 +144,51 @@ class FirecrawlApp { async scrapeUrl(url: string, params: Params | null = null): Promise { const headers = this.prepareHeaders() - let jsonData: Params = { url, ...params } - if (params?.extractorOptions?.extractionSchema) { - let schema = params.extractorOptions.extractionSchema - if (schema instanceof z.ZodSchema) { - schema = zodToJsonSchema(schema) + + // Create a clean payload with only valid parameters + const validParams: any = { + url, + formats: ['markdown'], + onlyMainContent: true + } + + // Add optional parameters if they exist + if (params?.scrapeOptions) { + if (params.scrapeOptions.includeTags) { + validParams.includeTags = Array.isArray(params.scrapeOptions.includeTags) + ? 
params.scrapeOptions.includeTags + : params.scrapeOptions.includeTags.split(',') } - jsonData = { - ...jsonData, - extractorOptions: { - ...params.extractorOptions, - extractionSchema: schema, - mode: params.extractorOptions.mode || 'llm-extraction' - } + if (params.scrapeOptions.excludeTags) { + validParams.excludeTags = Array.isArray(params.scrapeOptions.excludeTags) + ? params.scrapeOptions.excludeTags + : params.scrapeOptions.excludeTags.split(',') + } + if (params.scrapeOptions.mobile !== undefined) { + validParams.mobile = params.scrapeOptions.mobile + } + if (params.scrapeOptions.skipTlsVerification !== undefined) { + validParams.skipTlsVerification = params.scrapeOptions.skipTlsVerification + } + if (params.scrapeOptions.timeout) { + validParams.timeout = params.scrapeOptions.timeout } } + + // Add JSON options if they exist + if (params?.extractorOptions) { + validParams.jsonOptions = { + schema: params.extractorOptions.extractionSchema, + prompt: params.extractorOptions.extractionPrompt + } + } + try { - const response: AxiosResponse = await this.postRequest(this.apiUrl + '/v0/scrape', jsonData, headers) + const parameters = { + ...validParams, + integration: 'flowise' + } + const response: AxiosResponse = await this.postRequest(this.apiUrl + '/v1/scrape', parameters, headers) if (response.status === 200) { const responseData = response.data if (responseData.success) { @@ -114,25 +211,250 @@ class FirecrawlApp { waitUntilDone: boolean = true, pollInterval: number = 2, idempotencyKey?: string - ): Promise { + ): Promise { const headers = this.prepareHeaders(idempotencyKey) - let jsonData: Params = { url, ...params } + + // Create a clean payload with only valid parameters + const validParams: any = { + url + } + + // Add scrape options with only non-empty values + const scrapeOptions: any = { + formats: ['markdown'], + onlyMainContent: true + } + + // Add crawl-specific parameters if they exist and are not empty + if (params) { + const validCrawlParams = [ 
+ 'excludePaths', + 'includePaths', + 'maxDepth', + 'maxDiscoveryDepth', + 'ignoreSitemap', + 'ignoreQueryParameters', + 'limit', + 'allowBackwardLinks', + 'allowExternalLinks', + 'delay' + ] + + validCrawlParams.forEach((param) => { + if (params[param] !== undefined && params[param] !== null && params[param] !== '') { + validParams[param] = params[param] + } + }) + } + + // Add scrape options if they exist and are not empty + if (params?.scrapeOptions) { + if (params.scrapeOptions.includePaths) { + const includePaths = Array.isArray(params.scrapeOptions.includePaths) + ? params.scrapeOptions.includePaths + : params.scrapeOptions.includePaths.split(',') + if (includePaths.length > 0) { + validParams.includePaths = includePaths + } + } + + if (params.scrapeOptions.excludePaths) { + const excludePaths = Array.isArray(params.scrapeOptions.excludePaths) + ? params.scrapeOptions.excludePaths + : params.scrapeOptions.excludePaths.split(',') + if (excludePaths.length > 0) { + validParams.excludePaths = excludePaths + } + } + + if (params.scrapeOptions.limit) { + validParams.limit = params.scrapeOptions.limit + } + + const validScrapeParams = ['mobile', 'skipTlsVerification', 'timeout', 'includeTags', 'excludeTags', 'onlyMainContent'] + + validScrapeParams.forEach((param) => { + if (params.scrapeOptions[param] !== undefined && params.scrapeOptions[param] !== null) { + scrapeOptions[param] = params.scrapeOptions[param] + } + }) + } + + // Only add scrapeOptions if it has more than just the default values + if (Object.keys(scrapeOptions).length > 2) { + validParams.scrapeOptions = scrapeOptions + } + try { - const response: AxiosResponse = await this.postRequest(this.apiUrl + '/v0/crawl', jsonData, headers) + const parameters = { + ...validParams, + integration: 'flowise' + } + const response: AxiosResponse = await this.postRequest(this.apiUrl + '/v1/crawl', parameters, headers) if (response.status === 200) { - const jobId: string = response.data.jobId + const crawlResponse 
= response.data as CrawlResponse + if (!crawlResponse.success) { + throw new Error(`Crawl request failed: ${crawlResponse.error || 'Unknown error'}`) + } + if (waitUntilDone) { - return this.monitorJobStatus(jobId, headers, pollInterval) + return this.monitorJobStatus(crawlResponse.id, headers, pollInterval) } else { - return { success: true, jobId } + return crawlResponse } } else { this.handleError(response, 'start crawl job') } + } catch (error: any) { + if (error.response?.data?.error) { + throw new Error(`Crawl failed: ${error.response.data.error}`) + } + throw new Error(`Crawl failed: ${error.message}`) + } + + return { success: false, id: '', url: '' } + } + + async extract( + request: ExtractRequest, + waitUntilDone: boolean = true, + pollInterval: number = 2 + ): Promise { + const headers = this.prepareHeaders() + + // Create a clean payload with only valid parameters + const validParams: any = { + urls: request.urls + } + + // Add optional parameters if they exist and are not empty + if (request.prompt) { + validParams.prompt = request.prompt + } + + if (request.schema) { + validParams.schema = request.schema + } + + const validExtractParams = ['enableWebSearch', 'ignoreSitemap', 'includeSubdomains', 'showSources'] as const + + validExtractParams.forEach((param) => { + if (request[param] !== undefined && request[param] !== null) { + validParams[param] = request[param] + } + }) + + // Add scrape options if they exist + if (request.scrapeOptions) { + const scrapeOptions: any = { + formats: ['markdown'], + onlyMainContent: true + } + + // Handle includeTags + if (request.scrapeOptions.includeTags) { + const includeTags = Array.isArray(request.scrapeOptions.includeTags) + ? 
request.scrapeOptions.includeTags + : request.scrapeOptions.includeTags.split(',') + if (includeTags.length > 0) { + scrapeOptions.includeTags = includeTags + } + } + + // Handle excludeTags + if (request.scrapeOptions.excludeTags) { + const excludeTags = Array.isArray(request.scrapeOptions.excludeTags) + ? request.scrapeOptions.excludeTags + : request.scrapeOptions.excludeTags.split(',') + if (excludeTags.length > 0) { + scrapeOptions.excludeTags = excludeTags + } + } + + // Add other scrape options if they exist and are not empty + const validScrapeParams = ['mobile', 'skipTlsVerification', 'timeout'] as const + + validScrapeParams.forEach((param) => { + if (request.scrapeOptions?.[param] !== undefined && request.scrapeOptions?.[param] !== null) { + scrapeOptions[param] = request.scrapeOptions[param] + } + }) + + // Add JSON options if they exist + if (request.scrapeOptions.jsonOptions) { + scrapeOptions.jsonOptions = {} + if (request.scrapeOptions.jsonOptions.schema) { + scrapeOptions.jsonOptions.schema = request.scrapeOptions.jsonOptions.schema + } + if (request.scrapeOptions.jsonOptions.prompt) { + scrapeOptions.jsonOptions.prompt = request.scrapeOptions.jsonOptions.prompt + } + } + + // Only add scrapeOptions if it has more than just the default values + if (Object.keys(scrapeOptions).length > 2) { + validParams.scrapeOptions = scrapeOptions + } + } + + try { + const parameters = { + ...validParams, + integration: 'flowise' + } + const response: AxiosResponse = await this.postRequest(this.apiUrl + '/v1/extract', parameters, headers) + if (response.status === 200) { + const extractResponse = response.data as ExtractResponse + if (waitUntilDone) { + return this.monitorExtractStatus(extractResponse.id, headers, pollInterval) + } else { + return extractResponse + } + } else { + this.handleError(response, 'start extract job') + } } catch (error: any) { throw new Error(error.message) } - return { success: false, error: 'Internal server error.' 
} + return { success: false, id: '', url: '' } + } + + async search(request: SearchRequest): Promise { + const headers = this.prepareHeaders() + + // Create a clean payload with only valid parameters + const validParams: any = { + query: request.query + } + + // Add optional parameters if they exist and are not empty + const validSearchParams = ['limit', 'tbs', 'lang', 'country', 'location', 'timeout', 'ignoreInvalidURLs'] as const + + validSearchParams.forEach((param) => { + if (request[param] !== undefined && request[param] !== null) { + validParams[param] = request[param] + } + }) + + try { + const parameters = { + ...validParams, + integration: 'flowise' + } + const response: AxiosResponse = await this.postRequest(this.apiUrl + '/v1/search', parameters, headers) + if (response.status === 200) { + const searchResponse = response.data as SearchResponse + if (!searchResponse.success) { + throw new Error(`Search request failed: ${searchResponse.warning || 'Unknown error'}`) + } + return searchResponse + } else { + this.handleError(response, 'perform search') + } + } catch (error: any) { + throw new Error(error.message) + } + return { success: false } } private prepareHeaders(idempotencyKey?: string): AxiosRequestHeaders { @@ -143,41 +465,67 @@ class FirecrawlApp { } as AxiosRequestHeaders & { 'x-idempotency-key'?: string } } - private postRequest(url: string, data: Params, headers: AxiosRequestHeaders): Promise { - return axios.post(url, data, { headers }) + private async postRequest(url: string, data: Params, headers: AxiosRequestHeaders): Promise { + const result = await axios.post(url, data, { headers }) + return result } private getRequest(url: string, headers: AxiosRequestHeaders): Promise { return axios.get(url, { headers }) } - private async monitorJobStatus(jobId: string, headers: AxiosRequestHeaders, checkInterval: number): Promise { + private async monitorJobStatus(jobId: string, headers: AxiosRequestHeaders, checkInterval: number): Promise { let 
isJobCompleted = false while (!isJobCompleted) { - const statusResponse: AxiosResponse = await this.getRequest(this.apiUrl + `/v0/crawl/status/${jobId}`, headers) + const statusResponse: AxiosResponse = await this.getRequest(this.apiUrl + `/v1/crawl/${jobId}`, headers) if (statusResponse.status === 200) { - const statusData = statusResponse.data + const statusData = statusResponse.data as CrawlStatusResponse switch (statusData.status) { case 'completed': isJobCompleted = true - if ('data' in statusData) { - return statusData.data - } else { - throw new Error('Crawl job completed but no data was returned') + return statusData + case 'scraping': + case 'failed': + if (statusData.status === 'failed') { + throw new Error('Crawl job failed') } - case 'active': - case 'paused': - case 'pending': - case 'queued': await new Promise((resolve) => setTimeout(resolve, Math.max(checkInterval, 2) * 1000)) break default: - throw new Error(`Crawl job failed or was stopped. Status: ${statusData.status}`) + throw new Error(`Unknown crawl status: ${statusData.status}`) } } else { this.handleError(statusResponse, 'check crawl status') } } + throw new Error('Failed to monitor job status') + } + + private async monitorExtractStatus(jobId: string, headers: AxiosRequestHeaders, checkInterval: number): Promise { + let isJobCompleted = false + while (!isJobCompleted) { + const statusResponse: AxiosResponse = await this.getRequest(this.apiUrl + `/v1/extract/${jobId}`, headers) + if (statusResponse.status === 200) { + const statusData = statusResponse.data as ExtractStatusResponse + switch (statusData.status) { + case 'completed': + isJobCompleted = true + return statusData + case 'processing': + case 'failed': + if (statusData.status === 'failed') { + throw new Error('Extract job failed') + } + await new Promise((resolve) => setTimeout(resolve, Math.max(checkInterval, 2) * 1000)) + break + default: + throw new Error(`Unknown extract status: ${statusData.status}`) + } + } else { + 
this.handleError(statusResponse, 'check extract status') + } + } + throw new Error('Failed to monitor extract status') } private handleError(response: AxiosResponse, action: string): void { @@ -192,29 +540,32 @@ class FirecrawlApp { // FireCrawl Loader interface FirecrawlLoaderParameters { - url: string + url?: string + query?: string apiKey?: string apiUrl?: string - mode?: 'crawl' | 'scrape' + mode?: 'crawl' | 'scrape' | 'extract' | 'search' params?: Record } -class FireCrawlLoader extends BaseDocumentLoader { +export class FireCrawlLoader extends BaseDocumentLoader { private apiKey: string private apiUrl: string - private url: string - private mode: 'crawl' | 'scrape' + private url?: string + private query?: string + private mode: 'crawl' | 'scrape' | 'extract' | 'search' private params?: Record constructor(loaderParams: FirecrawlLoaderParameters) { super() - const { apiKey, apiUrl, url, mode = 'crawl', params } = loaderParams + const { apiKey, apiUrl, url, query, mode = 'crawl', params } = loaderParams if (!apiKey) { throw new Error('Firecrawl API key not set. You can set it as FIRECRAWL_API_KEY in your .env file, or pass it to Firecrawl.') } this.apiKey = apiKey this.url = url + this.query = query this.mode = mode this.params = params this.apiUrl = apiUrl || 'https://api.firecrawl.dev' @@ -224,26 +575,113 @@ class FireCrawlLoader extends BaseDocumentLoader { const app = new FirecrawlApp({ apiKey: this.apiKey, apiUrl: this.apiUrl }) let firecrawlDocs: FirecrawlDocument[] - if (this.mode === 'scrape') { + if (this.mode === 'search') { + if (!this.query) { + throw new Error('Firecrawl: Query is required for search mode') + } + const response = await app.search({ query: this.query, ...this.params }) + if (!response.success) { + throw new Error(`Firecrawl: Failed to search. 
Warning: ${response.warning}`) + } + + // Convert search results to FirecrawlDocument format + firecrawlDocs = (response.data || []).map((result) => ({ + markdown: result.description, + metadata: { + title: result.title, + sourceURL: result.url, + description: result.description + } + })) + } else if (this.mode === 'scrape') { + if (!this.url) { + throw new Error('Firecrawl: URL is required for scrape mode') + } const response = await app.scrapeUrl(this.url, this.params) if (!response.success) { throw new Error(`Firecrawl: Failed to scrape URL. Error: ${response.error}`) } firecrawlDocs = [response.data as FirecrawlDocument] } else if (this.mode === 'crawl') { - const response = await app.crawlUrl(this.url, this.params, true) - firecrawlDocs = response as FirecrawlDocument[] + if (!this.url) { + throw new Error('Firecrawl: URL is required for crawl mode') + } + const response = await app.crawlUrl(this.url, this.params) + if ('status' in response) { + if (response.status === 'failed') { + throw new Error('Firecrawl: Crawl job failed') + } + firecrawlDocs = response.data || [] + } else { + if (!response.success) { + throw new Error(`Firecrawl: Failed to scrape URL. 
Error: ${response.error}`) + } + firecrawlDocs = [response.data as FirecrawlDocument] + } + } else if (this.mode === 'extract') { + if (!this.url) { + throw new Error('Firecrawl: URL is required for extract mode') + } + this.params!.urls = [this.url] + const response = await app.extract(this.params as any as ExtractRequest) + if (!response.success) { + throw new Error(`Firecrawl: Failed to extract URL.`) + } + + // Convert extract response to document format + if ('data' in response && response.data) { + // Create a document from the extracted data + const extractedData = response.data + const content = JSON.stringify(extractedData, null, 2) + + const metadata: Record = { + source: this.url, + type: 'extracted_data' + } + + // Add status and expiresAt if they exist in the response + if ('status' in response) { + metadata.status = response.status + } + if ('data' in response) { + metadata.data = response.data + } + if ('expiresAt' in response) { + metadata.expiresAt = response.expiresAt + } + + return [ + new Document({ + pageContent: content, + metadata + }) + ] + } + return [] } else { - throw new Error(`Unrecognized mode '${this.mode}'. Expected one of 'crawl', 'scrape'.`) + throw new Error(`Unrecognized mode '${this.mode}'. 
Expected one of 'crawl', 'scrape', 'extract', 'search'.`) } - return firecrawlDocs.map( - (doc) => - new Document({ - pageContent: doc.markdown || '', - metadata: doc.metadata || {} - }) - ) + // Convert Firecrawl documents to LangChain documents + const documents = firecrawlDocs.map((doc) => { + // Use markdown content if available, otherwise fallback to HTML or empty string + const content = doc.markdown || doc.html || doc.rawHtml || '' + + // Create a standard LangChain document + return new Document({ + pageContent: content, + metadata: { + ...doc.metadata, + source: doc.metadata?.sourceURL || this.url, + title: doc.metadata?.title, + description: doc.metadata?.description, + language: doc.metadata?.language, + statusCode: doc.metadata?.statusCode + } + }) + }) + + return documents } } @@ -266,7 +704,7 @@ class FireCrawl_DocumentLoaders implements INode { this.name = 'fireCrawl' this.type = 'Document' this.icon = 'firecrawl.png' - this.version = 2.1 + this.version = 4.0 this.category = 'Document Loaders' this.description = 'Load data from URL using FireCrawl' this.baseClasses = [this.type] @@ -284,14 +722,7 @@ class FireCrawl_DocumentLoaders implements INode { optional: true }, { - label: 'URLs', - name: 'url', - type: 'string', - description: 'URL to be crawled/scraped', - placeholder: 'https://docs.flowiseai.com' - }, - { - label: 'Crawler type', + label: 'Type', type: 'options', name: 'crawlerType', options: [ @@ -304,36 +735,54 @@ class FireCrawl_DocumentLoaders implements INode { label: 'Scrape', name: 'scrape', description: 'Scrape a URL and get its content' + }, + { + label: 'Extract', + name: 'extract', + description: 'Extract data from a URL' + }, + { + label: 'Search', + name: 'search', + description: 'Search the web using FireCrawl' } ], default: 'crawl' }, { - // maxCrawlPages - label: 'Max Crawl Pages', - name: 'maxCrawlPages', + label: 'URLs', + name: 'url', type: 'string', - description: 'Maximum number of pages to crawl', + description: 'URL to 
be crawled/scraped/extracted', + placeholder: 'https://docs.flowiseai.com', optional: true, - additionalParams: true + show: { + crawlerType: ['crawl', 'scrape', 'extract'] + } }, { - // generateImgAltText - label: 'Generate Image Alt Text', - name: 'generateImgAltText', - type: 'boolean', - description: 'Generate alt text for images', + // includeTags + label: 'Include Tags', + name: 'includeTags', + type: 'string', + description: 'Tags to include in the output. Use comma to separate multiple tags.', optional: true, - additionalParams: true + additionalParams: true, + show: { + crawlerType: ['scrape'] + } }, { - // returnOnlyUrls - label: 'Return Only URLs', - name: 'returnOnlyUrls', - type: 'boolean', - description: 'Return only URLs of the crawled pages', + // excludeTags + label: 'Exclude Tags', + name: 'excludeTags', + type: 'string', + description: 'Tags to exclude from the output. Use comma to separate multiple tags.', optional: true, - additionalParams: true + additionalParams: true, + show: { + crawlerType: ['scrape'] + } }, { // onlyMainContent @@ -342,9 +791,129 @@ class FireCrawl_DocumentLoaders implements INode { type: 'boolean', description: 'Extract only the main content of the page', optional: true, - additionalParams: true + additionalParams: true, + show: { + crawlerType: ['scrape'] + } + }, + { + // limit + label: 'Limit', + name: 'limit', + type: 'string', + description: 'Maximum number of pages to crawl', + optional: true, + additionalParams: true, + default: '10000', + show: { + crawlerType: ['crawl'] + } + }, + { + label: 'Include Paths', + name: 'includePaths', + type: 'string', + description: + 'URL pathname regex patterns that include matching URLs in the crawl. 
Only the paths that match the specified patterns will be included in the response.', + placeholder: `blog/.*, news/.*`, + optional: true, + additionalParams: true, + show: { + crawlerType: ['crawl'] + } + }, + { + label: 'Exclude Paths', + name: 'excludePaths', + type: 'string', + description: 'URL pathname regex patterns that exclude matching URLs from the crawl.', + placeholder: `blog/.*, news/.*`, + optional: true, + additionalParams: true, + show: { + crawlerType: ['crawl'] + } + }, + { + label: 'Schema', + name: 'extractSchema', + type: 'json', + description: 'JSON schema for data extraction', + optional: true, + additionalParams: true, + show: { + crawlerType: ['extract'] + } + }, + { + label: 'Prompt', + name: 'extractPrompt', + type: 'string', + description: 'Prompt for data extraction', + optional: true, + additionalParams: true, + show: { + crawlerType: ['extract'] + } + }, + { + label: 'Query', + name: 'searchQuery', + type: 'string', + description: 'Search query to find relevant content', + optional: true, + show: { + crawlerType: ['search'] + } + }, + { + label: 'Limit', + name: 'searchLimit', + type: 'string', + description: 'Maximum number of results to return', + optional: true, + additionalParams: true, + default: '5', + show: { + crawlerType: ['search'] + } + }, + { + label: 'Language', + name: 'searchLang', + type: 'string', + description: 'Language code for search results (e.g., en, es, fr)', + optional: true, + additionalParams: true, + default: 'en', + show: { + crawlerType: ['search'] + } + }, + { + label: 'Country', + name: 'searchCountry', + type: 'string', + description: 'Country code for search results (e.g., us, uk, ca)', + optional: true, + additionalParams: true, + default: 'us', + show: { + crawlerType: ['search'] + } + }, + { + label: 'Timeout', + name: 'searchTimeout', + type: 'number', + description: 'Timeout in milliseconds for search operation', + optional: true, + additionalParams: true, + default: 60000, + show: { + 
crawlerType: ['search'] + } } - // ... (other input parameters) ] this.outputs = [ { @@ -367,66 +936,97 @@ class FireCrawl_DocumentLoaders implements INode { const metadata = nodeData.inputs?.metadata const url = nodeData.inputs?.url as string const crawlerType = nodeData.inputs?.crawlerType as string - const maxCrawlPages = nodeData.inputs?.maxCrawlPages as string - const generateImgAltText = nodeData.inputs?.generateImgAltText as boolean - const returnOnlyUrls = nodeData.inputs?.returnOnlyUrls as boolean + const limit = nodeData.inputs?.limit as string const onlyMainContent = nodeData.inputs?.onlyMainContent as boolean const credentialData = await getCredentialData(nodeData.credential ?? '', options) const firecrawlApiToken = getCredentialParam('firecrawlApiToken', credentialData, nodeData) const firecrawlApiUrl = getCredentialParam('firecrawlApiUrl', credentialData, nodeData, 'https://api.firecrawl.dev') const output = nodeData.outputs?.output as string - const urlPatternsExcludes = nodeData.inputs?.urlPatternsExcludes - ? (nodeData.inputs.urlPatternsExcludes.split(',') as string[]) - : undefined - const urlPatternsIncludes = nodeData.inputs?.urlPatternsIncludes - ? (nodeData.inputs.urlPatternsIncludes.split(',') as string[]) - : undefined + // Validate URL only for non-search methods + if (crawlerType !== 'search' && !url) { + throw new Error('Firecrawl: URL is required for ' + crawlerType + ' mode') + } + + const includePaths = nodeData.inputs?.includePaths ? (nodeData.inputs.includePaths.split(',') as string[]) : undefined + const excludePaths = nodeData.inputs?.excludePaths ? (nodeData.inputs.excludePaths.split(',') as string[]) : undefined + + const includeTags = nodeData.inputs?.includeTags ? (nodeData.inputs.includeTags.split(',') as string[]) : undefined + const excludeTags = nodeData.inputs?.excludeTags ? 
(nodeData.inputs.excludeTags.split(',') as string[]) : undefined + + const extractSchema = nodeData.inputs?.extractSchema + const extractPrompt = nodeData.inputs?.extractPrompt as string + + const searchQuery = nodeData.inputs?.searchQuery as string + const searchLimit = nodeData.inputs?.searchLimit as string + const searchLang = nodeData.inputs?.searchLang as string + const searchCountry = nodeData.inputs?.searchCountry as string + const searchTimeout = nodeData.inputs?.searchTimeout as number const input: FirecrawlLoaderParameters = { url, - mode: crawlerType as 'crawl' | 'scrape', + query: searchQuery, + mode: crawlerType as 'crawl' | 'scrape' | 'extract' | 'search', apiKey: firecrawlApiToken, apiUrl: firecrawlApiUrl, params: { - crawlerOptions: { - includes: urlPatternsIncludes, - excludes: urlPatternsExcludes, - generateImgAltText, - returnOnlyUrls, - limit: maxCrawlPages ? parseFloat(maxCrawlPages) : undefined + scrapeOptions: { + includePaths, + excludePaths, + limit: limit ? parseInt(limit, 10) : 1000, + includeTags, + excludeTags }, - pageOptions: { - onlyMainContent - } + schema: extractSchema || undefined, + prompt: extractPrompt || undefined } } - const loader = new FireCrawlLoader(input) + // Add search-specific parameters only when in search mode + if (crawlerType === 'search') { + if (!searchQuery) { + throw new Error('Firecrawl: Search query is required for search mode') + } + input.params = { + limit: searchLimit ? 
parseInt(searchLimit, 10) : 5, + lang: searchLang, + country: searchCountry, + timeout: searchTimeout + } + } + + if (onlyMainContent === true) { + const scrapeOptions = input.params?.scrapeOptions as any + input.params!.scrapeOptions = { + ...scrapeOptions, + onlyMainContent: true + } + } + + const loader = new FireCrawlLoader(input) let docs = [] - if (textSplitter) { - docs = await loader.loadAndSplit(textSplitter) - } else { - docs = await loader.load() + // Load documents + docs = await loader.load() + + // Apply text splitting if configured + if (textSplitter && docs.length > 0) { + docs = await textSplitter.splitDocuments(docs) } + // Apply metadata if provided if (metadata) { const parsedMetadata = typeof metadata === 'object' ? metadata : JSON.parse(metadata) - let finaldocs = [] - for (const doc of docs) { - const newdoc = { - ...doc, - metadata: { - ...doc.metadata, - ...parsedMetadata - } + docs = docs.map((doc) => ({ + ...doc, + metadata: { + ...doc.metadata, + ...parsedMetadata } - finaldocs.push(newdoc) - } - return finaldocs + })) } + // Return based on output type if (output === 'document') { return docs } else { @@ -440,3 +1040,6 @@ class FireCrawl_DocumentLoaders implements INode { } module.exports = { nodeClass: FireCrawl_DocumentLoaders } + +// FOR TESTING PURPOSES +// export { FireCrawl_DocumentLoaders } diff --git a/packages/components/nodes/documentloaders/Folder/Folder.ts b/packages/components/nodes/documentloaders/Folder/Folder.ts index 1a6afe057..e8aa16a5b 100644 --- a/packages/components/nodes/documentloaders/Folder/Folder.ts +++ b/packages/components/nodes/documentloaders/Folder/Folder.ts @@ -7,7 +7,10 @@ import { JSONLinesLoader, JSONLoader } from 'langchain/document_loaders/fs/json' import { CSVLoader } from '@langchain/community/document_loaders/fs/csv' import { PDFLoader } from '@langchain/community/document_loaders/fs/pdf' import { DocxLoader } from '@langchain/community/document_loaders/fs/docx' +import { LoadOfSheet } from 
'../MicrosoftExcel/ExcelLoader' +import { PowerpointLoader } from '../MicrosoftPowerpoint/PowerpointLoader' import { handleEscapeCharacters } from '../../../src/utils' +import { isPathTraversal } from '../../../src/validator' class Folder_DocumentLoaders implements INode { label: string @@ -123,6 +126,14 @@ class Folder_DocumentLoaders implements INode { const _omitMetadataKeys = nodeData.inputs?.omitMetadataKeys as string const output = nodeData.outputs?.output as string + if (!folderPath) { + throw new Error('Folder path is required') + } + + if (isPathTraversal(folderPath)) { + throw new Error('Invalid folder path: Path traversal detected. Please provide a safe folder path.') + } + let omitMetadataKeys: string[] = [] if (_omitMetadataKeys) { omitMetadataKeys = _omitMetadataKeys.split(',').map((key) => key.trim()) @@ -135,10 +146,14 @@ class Folder_DocumentLoaders implements INode { '.jsonl': (blob) => new JSONLinesLoader(blob, '/' + pointerName.trim()), '.txt': (path) => new TextLoader(path), '.csv': (path) => new CSVLoader(path), - '.xls': (path) => new CSVLoader(path), - '.xlsx': (path) => new CSVLoader(path), + '.xls': (path) => new LoadOfSheet(path), + '.xlsx': (path) => new LoadOfSheet(path), + '.xlsm': (path) => new LoadOfSheet(path), + '.xlsb': (path) => new LoadOfSheet(path), '.doc': (path) => new DocxLoader(path), '.docx': (path) => new DocxLoader(path), + '.ppt': (path) => new PowerpointLoader(path), + '.pptx': (path) => new PowerpointLoader(path), '.pdf': (path) => pdfUsage === 'perFile' ? 
// @ts-ignore diff --git a/packages/components/nodes/documentloaders/GoogleDrive/GoogleDrive.ts b/packages/components/nodes/documentloaders/GoogleDrive/GoogleDrive.ts new file mode 100644 index 000000000..fe56b20f7 --- /dev/null +++ b/packages/components/nodes/documentloaders/GoogleDrive/GoogleDrive.ts @@ -0,0 +1,829 @@ +import { omit } from 'lodash' +import { ICommonObject, IDocument, INode, INodeData, INodeParams, INodeOptionsValue } from '../../../src/Interface' +import { TextSplitter } from 'langchain/text_splitter' +import { + convertMultiOptionsToStringArray, + getCredentialData, + getCredentialParam, + handleEscapeCharacters, + INodeOutputsValue, + refreshOAuth2Token +} from '../../../src' +import { PDFLoader } from '@langchain/community/document_loaders/fs/pdf' +import { DocxLoader } from '@langchain/community/document_loaders/fs/docx' +import { CSVLoader } from '@langchain/community/document_loaders/fs/csv' +import * as fs from 'fs' +import * as path from 'path' +import * as os from 'os' +import { LoadOfSheet } from '../MicrosoftExcel/ExcelLoader' +import { PowerpointLoader } from '../MicrosoftPowerpoint/PowerpointLoader' + +// Helper function to get human-readable MIME type labels +const getMimeTypeLabel = (mimeType: string): string | undefined => { + const mimeTypeLabels: { [key: string]: string } = { + 'application/vnd.google-apps.document': 'Google Doc', + 'application/vnd.google-apps.spreadsheet': 'Google Sheet', + 'application/vnd.google-apps.presentation': 'Google Slides', + 'application/pdf': 'PDF', + 'text/plain': 'Text File', + 'application/vnd.openxmlformats-officedocument.wordprocessingml.document': 'Word Doc', + 'application/vnd.openxmlformats-officedocument.presentationml.presentation': 'PowerPoint', + 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet': 'Excel File' + } + return mimeTypeLabels[mimeType] || undefined +} + +class GoogleDrive_DocumentLoaders implements INode { + label: string + name: string + version: number + 
description: string + type: string + icon: string + category: string + baseClasses: string[] + credential: INodeParams + inputs: INodeParams[] + outputs: INodeOutputsValue[] + + constructor() { + this.label = 'Google Drive' + this.name = 'googleDrive' + this.version = 1.0 + this.type = 'Document' + this.icon = 'google-drive.svg' + this.category = 'Document Loaders' + this.description = `Load documents from Google Drive files` + this.baseClasses = [this.type] + this.credential = { + label: 'Connect Credential', + name: 'credential', + type: 'credential', + description: 'Google Drive OAuth2 Credential', + credentialNames: ['googleDriveOAuth2'] + } + this.inputs = [ + { + label: 'Select Files', + name: 'selectedFiles', + type: 'asyncMultiOptions', + loadMethod: 'listFiles', + description: 'Select files from your Google Drive', + refresh: true, + optional: true + }, + { + label: 'Folder ID', + name: 'folderId', + type: 'string', + description: 'Google Drive folder ID to load all files from (alternative to selecting specific files)', + placeholder: '1BxiMVs0XRA5nFMdKvBdBZjgmUUqptlbs74OgvE2upms', + optional: true + }, + { + label: 'File Types', + name: 'fileTypes', + type: 'multiOptions', + description: 'Types of files to load', + options: [ + { + label: 'Google Docs', + name: 'application/vnd.google-apps.document' + }, + { + label: 'Google Sheets', + name: 'application/vnd.google-apps.spreadsheet' + }, + { + label: 'Google Slides', + name: 'application/vnd.google-apps.presentation' + }, + { + label: 'PDF Files', + name: 'application/pdf' + }, + { + label: 'Text Files', + name: 'text/plain' + }, + { + label: 'Word Documents', + name: 'application/vnd.openxmlformats-officedocument.wordprocessingml.document' + }, + { + label: 'PowerPoint', + name: 'application/vnd.openxmlformats-officedocument.presentationml.presentation' + }, + { + label: 'Excel Files', + name: 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet' + } + ], + default: [ + 
'application/vnd.google-apps.document', + 'application/vnd.google-apps.spreadsheet', + 'application/vnd.google-apps.presentation', + 'text/plain', + 'application/pdf', + 'application/vnd.openxmlformats-officedocument.wordprocessingml.document', + 'application/vnd.openxmlformats-officedocument.presentationml.presentation', + 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet' + ], + optional: true + }, + { + label: 'Include Subfolders', + name: 'includeSubfolders', + type: 'boolean', + description: 'Whether to include files from subfolders when loading from a folder', + default: false, + optional: true + }, + { + label: 'Include Shared Drives', + name: 'includeSharedDrives', + type: 'boolean', + description: 'Whether to include files from shared drives (Team Drives) that you have access to', + default: false, + optional: true + }, + { + label: 'Max Files', + name: 'maxFiles', + type: 'number', + description: 'Maximum number of files to load (default: 50)', + default: 50, + optional: true + }, + { + label: 'Text Splitter', + name: 'textSplitter', + type: 'TextSplitter', + optional: true + }, + { + label: 'Additional Metadata', + name: 'metadata', + type: 'json', + description: 'Additional metadata to be added to the extracted documents', + optional: true, + additionalParams: true + }, + { + label: 'Omit Metadata Keys', + name: 'omitMetadataKeys', + type: 'string', + rows: 4, + description: + 'Each document loader comes with a default set of metadata keys that are extracted from the document. You can use this field to omit some of the default metadata keys. The value should be a list of keys, seperated by comma. 
Use * to omit all metadata keys execept the ones you specify in the Additional Metadata field', + placeholder: 'key1, key2, key3.nestedKey1', + optional: true, + additionalParams: true + } + ] + this.outputs = [ + { + label: 'Document', + name: 'document', + description: 'Array of document objects containing metadata and pageContent', + baseClasses: [...this.baseClasses, 'json'] + }, + { + label: 'Text', + name: 'text', + description: 'Concatenated string from pageContent of documents', + baseClasses: ['string', 'json'] + } + ] + } + + //@ts-ignore + loadMethods = { + async listFiles(nodeData: INodeData, options: ICommonObject): Promise { + const returnData: INodeOptionsValue[] = [] + + try { + let credentialData = await getCredentialData(nodeData.credential ?? '', options) + credentialData = await refreshOAuth2Token(nodeData.credential ?? '', credentialData, options) + const accessToken = getCredentialParam('access_token', credentialData, nodeData) + + if (!accessToken) { + return returnData + } + + // Get file types from input to filter + const fileTypes = convertMultiOptionsToStringArray(nodeData.inputs?.fileTypes) + const includeSharedDrives = nodeData.inputs?.includeSharedDrives as boolean + const maxFiles = (nodeData.inputs?.maxFiles as number) || 100 + + let query = 'trashed = false' + + // Add file type filter if specified + if (fileTypes && fileTypes.length > 0) { + const mimeTypeQuery = fileTypes.map((type) => `mimeType='${type}'`).join(' or ') + query += ` and (${mimeTypeQuery})` + } + + const url = new URL('https://www.googleapis.com/drive/v3/files') + url.searchParams.append('q', query) + url.searchParams.append('pageSize', Math.min(maxFiles, 1000).toString()) + url.searchParams.append('fields', 'files(id, name, mimeType, size, createdTime, modifiedTime, webViewLink, driveId)') + url.searchParams.append('orderBy', 'modifiedTime desc') + + // Add shared drives support if requested + if (includeSharedDrives) { + 
url.searchParams.append('supportsAllDrives', 'true') + url.searchParams.append('includeItemsFromAllDrives', 'true') + } + + const response = await fetch(url.toString(), { + headers: { + Authorization: `Bearer ${accessToken}`, + 'Content-Type': 'application/json' + } + }) + + if (!response.ok) { + console.error(`Failed to list files: ${response.statusText}`) + return returnData + } + + const data = await response.json() + + for (const file of data.files) { + const mimeTypeLabel = getMimeTypeLabel(file.mimeType) + if (!mimeTypeLabel) { + continue + } + + // Add drive context to description + const driveContext = file.driveId ? ' (Shared Drive)' : ' (My Drive)' + + const obj: INodeOptionsValue = { + name: file.id, + label: file.name, + description: `Type: ${mimeTypeLabel}${driveContext} | Modified: ${new Date(file.modifiedTime).toLocaleDateString()}` + } + returnData.push(obj) + } + } catch (error) { + console.error('Error listing Google Drive files:', error) + } + + return returnData + } + } + + async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { + const selectedFiles = nodeData.inputs?.selectedFiles as string + const folderId = nodeData.inputs?.folderId as string + const fileTypes = nodeData.inputs?.fileTypes as string[] + const includeSubfolders = nodeData.inputs?.includeSubfolders as boolean + const includeSharedDrives = nodeData.inputs?.includeSharedDrives as boolean + const maxFiles = (nodeData.inputs?.maxFiles as number) || 50 + const textSplitter = nodeData.inputs?.textSplitter as TextSplitter + const metadata = nodeData.inputs?.metadata + const _omitMetadataKeys = nodeData.inputs?.omitMetadataKeys as string + const output = nodeData.outputs?.output as string + + let omitMetadataKeys: string[] = [] + if (_omitMetadataKeys) { + omitMetadataKeys = _omitMetadataKeys.split(',').map((key) => key.trim()) + } + + if (!selectedFiles && !folderId) { + throw new Error('Either selected files or Folder ID is required') + } + + let credentialData 
= await getCredentialData(nodeData.credential ?? '', options) + credentialData = await refreshOAuth2Token(nodeData.credential ?? '', credentialData, options) + const accessToken = getCredentialParam('access_token', credentialData, nodeData) + + if (!accessToken) { + throw new Error('No access token found in credential') + } + + let docs: IDocument[] = [] + + try { + let filesToProcess: any[] = [] + + if (selectedFiles) { + // Load selected files (selectedFiles can be a single ID or comma-separated IDs) + let ids: string[] = [] + if (typeof selectedFiles === 'string' && selectedFiles.startsWith('[') && selectedFiles.endsWith(']')) { + ids = convertMultiOptionsToStringArray(selectedFiles) + } else if (typeof selectedFiles === 'string') { + ids = [selectedFiles] + } else if (Array.isArray(selectedFiles)) { + ids = selectedFiles + } + for (const id of ids) { + const fileInfo = await this.getFileInfo(id, accessToken, includeSharedDrives) + if (fileInfo && this.shouldProcessFile(fileInfo, fileTypes)) { + filesToProcess.push(fileInfo) + } + } + } else if (folderId) { + // Load files from folder + filesToProcess = await this.getFilesFromFolder( + folderId, + accessToken, + fileTypes, + includeSubfolders, + includeSharedDrives, + maxFiles + ) + } + + // Process each file + for (const fileInfo of filesToProcess) { + try { + const doc = await this.processFile(fileInfo, accessToken) + if (doc.length > 0) { + docs.push(...doc) + } + } catch (error) { + console.warn(`Failed to process file ${fileInfo.name}: ${error.message}`) + } + } + + // Apply text splitter if provided + if (textSplitter && docs.length > 0) { + docs = await textSplitter.splitDocuments(docs) + } + + // Apply metadata transformations + if (metadata) { + const parsedMetadata = typeof metadata === 'object' ? metadata : JSON.parse(metadata) + docs = docs.map((doc) => ({ + ...doc, + metadata: + _omitMetadataKeys === '*' + ? 
{ + ...parsedMetadata + } + : omit( + { + ...doc.metadata, + ...parsedMetadata + }, + omitMetadataKeys + ) + })) + } else { + docs = docs.map((doc) => ({ + ...doc, + metadata: + _omitMetadataKeys === '*' + ? {} + : omit( + { + ...doc.metadata + }, + omitMetadataKeys + ) + })) + } + } catch (error) { + throw new Error(`Failed to load Google Drive documents: ${error.message}`) + } + + if (output === 'document') { + return docs + } else { + let finaltext = '' + for (const doc of docs) { + finaltext += `${doc.pageContent}\n` + } + return handleEscapeCharacters(finaltext, false) + } + } + + private async getFileInfo(fileId: string, accessToken: string, includeSharedDrives: boolean): Promise { + const url = new URL(`https://www.googleapis.com/drive/v3/files/${encodeURIComponent(fileId)}`) + url.searchParams.append('fields', 'id, name, mimeType, size, createdTime, modifiedTime, parents, webViewLink, driveId') + + // Add shared drives support if requested + if (includeSharedDrives) { + url.searchParams.append('supportsAllDrives', 'true') + } + + const response = await fetch(url.toString(), { + headers: { + Authorization: `Bearer ${accessToken}`, + 'Content-Type': 'application/json' + } + }) + + if (!response.ok) { + throw new Error(`Failed to get file info: ${response.statusText}`) + } + + const fileInfo = await response.json() + + // Add drive context to description + const driveContext = fileInfo.driveId ? 
' (Shared Drive)' : ' (My Drive)' + + return { + ...fileInfo, + driveContext + } + } + + private async getFilesFromFolder( + folderId: string, + accessToken: string, + fileTypes: string[] | undefined, + includeSubfolders: boolean, + includeSharedDrives: boolean, + maxFiles: number + ): Promise { + const files: any[] = [] + let nextPageToken: string | undefined + + do { + let query = `'${folderId}' in parents and trashed = false` + + // Add file type filter if specified + if (fileTypes && fileTypes.length > 0) { + const mimeTypeQuery = fileTypes.map((type) => `mimeType='${type}'`).join(' or ') + query += ` and (${mimeTypeQuery})` + } + + const url = new URL('https://www.googleapis.com/drive/v3/files') + url.searchParams.append('q', query) + url.searchParams.append('pageSize', Math.min(maxFiles - files.length, 1000).toString()) + url.searchParams.append( + 'fields', + 'nextPageToken, files(id, name, mimeType, size, createdTime, modifiedTime, parents, webViewLink, driveId)' + ) + + // Add shared drives support if requested + if (includeSharedDrives) { + url.searchParams.append('supportsAllDrives', 'true') + url.searchParams.append('includeItemsFromAllDrives', 'true') + } + + if (nextPageToken) { + url.searchParams.append('pageToken', nextPageToken) + } + + const response = await fetch(url.toString(), { + headers: { + Authorization: `Bearer ${accessToken}`, + 'Content-Type': 'application/json' + } + }) + + if (!response.ok) { + throw new Error(`Failed to list files: ${response.statusText}`) + } + + const data = await response.json() + + // Add drive context to each file + const filesWithContext = data.files.map((file: any) => ({ + ...file, + driveContext: file.driveId ? 
' (Shared Drive)' : ' (My Drive)' + })) + + files.push(...filesWithContext) + nextPageToken = data.nextPageToken + + // If includeSubfolders is true, also get files from subfolders + if (includeSubfolders) { + for (const file of data.files) { + if (file.mimeType === 'application/vnd.google-apps.folder') { + const subfolderFiles = await this.getFilesFromFolder( + file.id, + accessToken, + fileTypes, + includeSubfolders, + includeSharedDrives, + maxFiles - files.length + ) + files.push(...subfolderFiles) + } + } + } + } while (nextPageToken && files.length < maxFiles) + + return files.slice(0, maxFiles) + } + + private shouldProcessFile(fileInfo: any, fileTypes: string[] | undefined): boolean { + if (!fileTypes || fileTypes.length === 0) { + return true + } + return fileTypes.includes(fileInfo.mimeType) + } + + private async processFile(fileInfo: any, accessToken: string): Promise { + let content = '' + + try { + // Handle different file types + if (this.isTextBasedFile(fileInfo.mimeType)) { + // Download regular text files + content = await this.downloadFile(fileInfo.id, accessToken) + + // Create document with metadata + return [ + { + pageContent: content, + metadata: { + source: fileInfo.webViewLink || `https://drive.google.com/file/d/${fileInfo.id}/view`, + fileId: fileInfo.id, + fileName: fileInfo.name, + mimeType: fileInfo.mimeType, + size: fileInfo.size ? parseInt(fileInfo.size) : undefined, + createdTime: fileInfo.createdTime, + modifiedTime: fileInfo.modifiedTime, + parents: fileInfo.parents, + driveId: fileInfo.driveId, + driveContext: fileInfo.driveContext || (fileInfo.driveId ? 
' (Shared Drive)' : ' (My Drive)') + } + } + ] + } else if (this.isSupportedBinaryFile(fileInfo.mimeType) || this.isGoogleWorkspaceFile(fileInfo.mimeType)) { + // Process binary files and Google Workspace files using loaders + return await this.processBinaryFile(fileInfo, accessToken) + } else { + console.warn(`Unsupported file type ${fileInfo.mimeType} for file ${fileInfo.name}`) + return [] + } + } catch (error) { + console.warn(`Failed to process file ${fileInfo.name}: ${error.message}`) + return [] + } + } + + private isSupportedBinaryFile(mimeType: string): boolean { + const supportedBinaryTypes = [ + 'application/pdf', + 'application/vnd.openxmlformats-officedocument.wordprocessingml.document', + 'application/msword', + 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet', + 'application/vnd.ms-excel', + 'text/csv' + ] + return supportedBinaryTypes.includes(mimeType) + } + + private async processBinaryFile(fileInfo: any, accessToken: string): Promise { + let tempFilePath: string | null = null + + try { + let buffer: Buffer + let processedMimeType: string + let processedFileName: string + + if (this.isGoogleWorkspaceFile(fileInfo.mimeType)) { + // Handle Google Workspace files by exporting to appropriate format + const exportResult = await this.exportGoogleWorkspaceFileAsBuffer(fileInfo.id, fileInfo.mimeType, accessToken) + buffer = exportResult.buffer + processedMimeType = exportResult.mimeType + processedFileName = exportResult.fileName + } else { + // Handle regular binary files + buffer = await this.downloadBinaryFile(fileInfo.id, accessToken) + processedMimeType = fileInfo.mimeType + processedFileName = fileInfo.name + } + + // Download file to temporary location + tempFilePath = await this.createTempFile(buffer, processedFileName, processedMimeType) + + let docs: IDocument[] = [] + const mimeType = processedMimeType.toLowerCase() + switch (mimeType) { + case 'application/pdf': { + const pdfLoader = new PDFLoader(tempFilePath, { + // 
@ts-ignore + pdfjs: () => import('pdf-parse/lib/pdf.js/v1.10.100/build/pdf.js') + }) + docs = await pdfLoader.load() + break + } + + case 'application/vnd.openxmlformats-officedocument.wordprocessingml.document': + case 'application/msword': { + const docxLoader = new DocxLoader(tempFilePath) + docs = await docxLoader.load() + break + } + + case 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet': + case 'application/vnd.ms-excel': { + const excelLoader = new LoadOfSheet(tempFilePath) + docs = await excelLoader.load() + break + } + case 'application/vnd.openxmlformats-officedocument.presentationml.presentation': + case 'application/vnd.ms-powerpoint': { + const pptxLoader = new PowerpointLoader(tempFilePath) + docs = await pptxLoader.load() + break + } + case 'text/csv': { + const csvLoader = new CSVLoader(tempFilePath) + docs = await csvLoader.load() + break + } + + default: + throw new Error(`Unsupported binary file type: ${mimeType}`) + } + + // Add Google Drive metadata to each document + if (docs.length > 0) { + const googleDriveMetadata = { + source: fileInfo.webViewLink || `https://drive.google.com/file/d/${fileInfo.id}/view`, + fileId: fileInfo.id, + fileName: fileInfo.name, + mimeType: fileInfo.mimeType, + size: fileInfo.size ? parseInt(fileInfo.size) : undefined, + createdTime: fileInfo.createdTime, + modifiedTime: fileInfo.modifiedTime, + parents: fileInfo.parents, + totalPages: docs.length // Total number of pages/sheets in the file + } + + return docs.map((doc, index) => ({ + ...doc, + metadata: { + ...doc.metadata, // Keep original loader metadata (page numbers, etc.) + ...googleDriveMetadata, // Add Google Drive metadata + pageIndex: index, // Add page/sheet index + driveId: fileInfo.driveId, + driveContext: fileInfo.driveContext || (fileInfo.driveId ? 
' (Shared Drive)' : ' (My Drive)') + } + })) + } + + return [] + } catch (error) { + throw new Error(`Failed to process binary file: ${error.message}`) + } finally { + // Clean up temporary file + if (tempFilePath && fs.existsSync(tempFilePath)) { + try { + fs.unlinkSync(tempFilePath) + } catch (e) { + console.warn(`Failed to delete temporary file: ${tempFilePath}`) + } + } + } + } + + private async createTempFile(buffer: Buffer, fileName: string, mimeType: string): Promise { + // Get appropriate file extension + let extension = path.extname(fileName) + if (!extension) { + const extensionMap: { [key: string]: string } = { + 'application/pdf': '.pdf', + 'application/vnd.openxmlformats-officedocument.wordprocessingml.document': '.docx', + 'application/msword': '.doc', + 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet': '.xlsx', + 'application/vnd.ms-excel': '.xls', + 'application/vnd.openxmlformats-officedocument.presentationml.presentation': '.pptx', + 'application/vnd.ms-powerpoint': '.ppt', + 'text/csv': '.csv' + } + extension = extensionMap[mimeType] || '.tmp' + } + + // Create temporary file + const tempDir = os.tmpdir() + const tempFileName = `gdrive_${Date.now()}_${Math.random().toString(36).substring(7)}${extension}` + const tempFilePath = path.join(tempDir, tempFileName) + + fs.writeFileSync(tempFilePath, buffer) + return tempFilePath + } + + private async downloadBinaryFile(fileId: string, accessToken: string): Promise { + const url = `https://www.googleapis.com/drive/v3/files/${encodeURIComponent(fileId)}?alt=media` + + const response = await fetch(url, { + headers: { + Authorization: `Bearer ${accessToken}` + } + }) + + if (!response.ok) { + throw new Error(`Failed to download file: ${response.statusText}`) + } + + const arrayBuffer = await response.arrayBuffer() + return Buffer.from(arrayBuffer) + } + + private async downloadFile(fileId: string, accessToken: string): Promise { + const url = 
`https://www.googleapis.com/drive/v3/files/${encodeURIComponent(fileId)}?alt=media` + + const response = await fetch(url, { + headers: { + Authorization: `Bearer ${accessToken}` + } + }) + + if (!response.ok) { + throw new Error(`Failed to download file: ${response.statusText}`) + } + + // Only call response.text() for text-based files + const contentType = response.headers.get('content-type') || '' + if (!contentType.startsWith('text/') && !contentType.includes('json') && !contentType.includes('xml')) { + throw new Error(`Cannot process binary file with content-type: ${contentType}`) + } + + return await response.text() + } + + private isGoogleWorkspaceFile(mimeType: string): boolean { + const googleWorkspaceMimeTypes = [ + 'application/vnd.google-apps.document', + 'application/vnd.google-apps.spreadsheet', + 'application/vnd.google-apps.presentation', + 'application/vnd.google-apps.drawing' + ] + return googleWorkspaceMimeTypes.includes(mimeType) + } + + private isTextBasedFile(mimeType: string): boolean { + const textBasedMimeTypes = [ + 'text/plain', + 'text/html', + 'text/css', + 'text/javascript', + 'text/csv', + 'text/xml', + 'application/json', + 'application/xml', + 'text/markdown', + 'text/x-markdown' + ] + return textBasedMimeTypes.includes(mimeType) + } + + private async exportGoogleWorkspaceFileAsBuffer( + fileId: string, + mimeType: string, + accessToken: string + ): Promise<{ buffer: Buffer; mimeType: string; fileName: string }> { + // Automatic mapping of Google Workspace MIME types to export formats + let exportMimeType: string + let fileExtension: string + + switch (mimeType) { + case 'application/vnd.google-apps.document': + exportMimeType = 'application/vnd.openxmlformats-officedocument.wordprocessingml.document' + fileExtension = '.docx' + break + case 'application/vnd.google-apps.spreadsheet': + exportMimeType = 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet' + fileExtension = '.xlsx' + break + case 
'application/vnd.google-apps.presentation': + exportMimeType = 'application/vnd.openxmlformats-officedocument.presentationml.presentation' + fileExtension = '.pptx' + break + case 'application/vnd.google-apps.drawing': + exportMimeType = 'application/pdf' + fileExtension = '.pdf' + break + default: + // Fallback to DOCX for any other Google Workspace file + exportMimeType = 'application/vnd.openxmlformats-officedocument.wordprocessingml.document' + fileExtension = '.docx' + break + } + + const url = `https://www.googleapis.com/drive/v3/files/${encodeURIComponent(fileId)}/export?mimeType=${encodeURIComponent( + exportMimeType + )}` + + const response = await fetch(url, { + headers: { + Authorization: `Bearer ${accessToken}` + } + }) + + if (!response.ok) { + throw new Error(`Failed to export file: ${response.statusText}`) + } + + const arrayBuffer = await response.arrayBuffer() + const buffer = Buffer.from(arrayBuffer) + + return { + buffer, + mimeType: exportMimeType, + fileName: `exported_file${fileExtension}` + } + } +} + +module.exports = { nodeClass: GoogleDrive_DocumentLoaders } diff --git a/packages/components/nodes/documentloaders/GoogleDrive/google-drive.svg b/packages/components/nodes/documentloaders/GoogleDrive/google-drive.svg new file mode 100644 index 000000000..03b2f2129 --- /dev/null +++ b/packages/components/nodes/documentloaders/GoogleDrive/google-drive.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/packages/components/nodes/documentloaders/GoogleSheets/GoogleSheets.ts b/packages/components/nodes/documentloaders/GoogleSheets/GoogleSheets.ts new file mode 100644 index 000000000..8c7a7a2a4 --- /dev/null +++ b/packages/components/nodes/documentloaders/GoogleSheets/GoogleSheets.ts @@ -0,0 +1,429 @@ +import { omit } from 'lodash' +import { ICommonObject, IDocument, INode, INodeData, INodeParams, INodeOptionsValue } from '../../../src/Interface' +import { TextSplitter } from 'langchain/text_splitter' +import { + 
convertMultiOptionsToStringArray, + getCredentialData, + getCredentialParam, + handleEscapeCharacters, + INodeOutputsValue, + refreshOAuth2Token +} from '../../../src' + +class GoogleSheets_DocumentLoaders implements INode { + label: string + name: string + version: number + description: string + type: string + icon: string + category: string + baseClasses: string[] + credential: INodeParams + inputs: INodeParams[] + outputs: INodeOutputsValue[] + + constructor() { + this.label = 'Google Sheets' + this.name = 'googleSheets' + this.version = 1.0 + this.type = 'Document' + this.icon = 'google-sheets.svg' + this.category = 'Document Loaders' + this.description = `Load data from Google Sheets as documents` + this.baseClasses = [this.type] + this.credential = { + label: 'Connect Credential', + name: 'credential', + type: 'credential', + description: 'Google Sheets OAuth2 Credential', + credentialNames: ['googleSheetsOAuth2'] + } + this.inputs = [ + { + label: 'Select Spreadsheet', + name: 'spreadsheetIds', + type: 'asyncMultiOptions', + loadMethod: 'listSpreadsheets', + description: 'Select spreadsheet from your Google Drive', + refresh: true + }, + { + label: 'Sheet Names', + name: 'sheetNames', + type: 'string', + description: 'Comma-separated list of sheet names to load. If empty, loads all sheets.', + placeholder: 'Sheet1, Sheet2', + optional: true + }, + { + label: 'Range', + name: 'range', + type: 'string', + description: 'Range to load (e.g., A1:E10). 
If empty, loads entire sheet.', + placeholder: 'A1:E10', + optional: true + }, + { + label: 'Include Headers', + name: 'includeHeaders', + type: 'boolean', + description: 'Whether to include the first row as headers', + default: true + }, + { + label: 'Value Render Option', + name: 'valueRenderOption', + type: 'options', + description: 'How values should be represented in the output', + options: [ + { + label: 'Formatted Value', + name: 'FORMATTED_VALUE' + }, + { + label: 'Unformatted Value', + name: 'UNFORMATTED_VALUE' + }, + { + label: 'Formula', + name: 'FORMULA' + } + ], + default: 'FORMATTED_VALUE', + optional: true + }, + { + label: 'Text Splitter', + name: 'textSplitter', + type: 'TextSplitter', + optional: true + }, + { + label: 'Additional Metadata', + name: 'metadata', + type: 'json', + description: 'Additional metadata to be added to the extracted documents', + optional: true, + additionalParams: true + }, + { + label: 'Omit Metadata Keys', + name: 'omitMetadataKeys', + type: 'string', + rows: 4, + description: + 'Each document loader comes with a default set of metadata keys that are extracted from the document. You can use this field to omit some of the default metadata keys. The value should be a list of keys, separated by comma. 
Use * to omit all metadata keys except the ones you specify in the Additional Metadata field', + placeholder: 'key1, key2, key3.nestedKey1', + optional: true, + additionalParams: true + } + ] + this.outputs = [ + { + label: 'Document', + name: 'document', + description: 'Array of document objects containing metadata and pageContent', + baseClasses: [...this.baseClasses, 'json'] + }, + { + label: 'Text', + name: 'text', + description: 'Concatenated string from pageContent of documents', + baseClasses: ['string', 'json'] + } + ] + } + + //@ts-ignore + loadMethods = { + async listSpreadsheets(nodeData: INodeData, options: ICommonObject): Promise { + const returnData: INodeOptionsValue[] = [] + + try { + let credentialData = await getCredentialData(nodeData.credential ?? '', options) + credentialData = await refreshOAuth2Token(nodeData.credential ?? '', credentialData, options) + const accessToken = getCredentialParam('access_token', credentialData, nodeData) + + if (!accessToken) { + return returnData + } + + // Query for Google Sheets files specifically + const query = "mimeType='application/vnd.google-apps.spreadsheet' and trashed = false" + + const url = new URL('https://www.googleapis.com/drive/v3/files') + url.searchParams.append('q', query) + url.searchParams.append('pageSize', '100') + url.searchParams.append('fields', 'files(id, name, modifiedTime, webViewLink)') + url.searchParams.append('orderBy', 'modifiedTime desc') + + const response = await fetch(url.toString(), { + headers: { + Authorization: `Bearer ${accessToken}`, + 'Content-Type': 'application/json' + } + }) + + if (!response.ok) { + console.error(`Failed to list spreadsheets: ${response.statusText}`) + return returnData + } + + const data = await response.json() + + for (const file of data.files) { + const obj: INodeOptionsValue = { + name: file.id, + label: file.name, + description: `Modified: ${new Date(file.modifiedTime).toLocaleDateString()}` + } + returnData.push(obj) + } + } catch (error) { 
+ console.error('Error listing Google Sheets:', error) + } + + return returnData + } + } + + async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { + const _spreadsheetIds = nodeData.inputs?.spreadsheetIds as string + const sheetNames = nodeData.inputs?.sheetNames as string + const range = nodeData.inputs?.range as string + const includeHeaders = nodeData.inputs?.includeHeaders as boolean + const valueRenderOption = (nodeData.inputs?.valueRenderOption as string) || 'FORMATTED_VALUE' + const textSplitter = nodeData.inputs?.textSplitter as TextSplitter + const metadata = nodeData.inputs?.metadata + const _omitMetadataKeys = nodeData.inputs?.omitMetadataKeys as string + const output = nodeData.outputs?.output as string + + let omitMetadataKeys: string[] = [] + if (_omitMetadataKeys) { + omitMetadataKeys = _omitMetadataKeys.split(',').map((key) => key.trim()) + } + + if (!_spreadsheetIds) { + throw new Error('At least one spreadsheet is required') + } + + let spreadsheetIds = convertMultiOptionsToStringArray(_spreadsheetIds) + + let credentialData = await getCredentialData(nodeData.credential ?? '', options) + credentialData = await refreshOAuth2Token(nodeData.credential ?? 
'', credentialData, options) + const accessToken = getCredentialParam('access_token', credentialData, nodeData) + + if (!accessToken) { + throw new Error('No access token found in credential') + } + + let docs: IDocument[] = [] + + try { + // Process each spreadsheet + for (const spreadsheetId of spreadsheetIds) { + try { + // Get spreadsheet metadata first + const spreadsheetMetadata = await this.getSpreadsheetMetadata(spreadsheetId, accessToken) + + // Determine which sheets to load + let sheetsToLoad: string[] = [] + if (sheetNames) { + sheetsToLoad = sheetNames.split(',').map((name) => name.trim()) + } else { + // Get all sheet names from metadata + sheetsToLoad = spreadsheetMetadata.sheets?.map((sheet: any) => sheet.properties.title) || [] + } + + // Load data from each sheet + for (const sheetName of sheetsToLoad) { + const sheetRange = range ? `${sheetName}!${range}` : sheetName + const sheetData = await this.getSheetData(spreadsheetId, sheetRange, valueRenderOption, accessToken) + + if (sheetData.values && sheetData.values.length > 0) { + const sheetDoc = this.convertSheetToDocument( + sheetData, + sheetName, + spreadsheetId, + spreadsheetMetadata, + includeHeaders + ) + docs.push(sheetDoc) + } + } + } catch (error) { + console.warn(`Failed to process spreadsheet ${spreadsheetId}: ${error.message}`) + // Continue processing other spreadsheets even if one fails + } + } + + // Apply text splitter if provided + if (textSplitter && docs.length > 0) { + docs = await textSplitter.splitDocuments(docs) + } + + // Apply metadata transformations + if (metadata) { + const parsedMetadata = typeof metadata === 'object' ? metadata : JSON.parse(metadata) + docs = docs.map((doc) => ({ + ...doc, + metadata: + _omitMetadataKeys === '*' + ? { + ...parsedMetadata + } + : omit( + { + ...doc.metadata, + ...parsedMetadata + }, + omitMetadataKeys + ) + })) + } else { + docs = docs.map((doc) => ({ + ...doc, + metadata: + _omitMetadataKeys === '*' + ? 
{} + : omit( + { + ...doc.metadata + }, + omitMetadataKeys + ) + })) + } + } catch (error) { + throw new Error(`Failed to load Google Sheets data: ${error.message}`) + } + + if (output === 'document') { + return docs + } else { + let finaltext = '' + for (const doc of docs) { + finaltext += `${doc.pageContent}\n` + } + return handleEscapeCharacters(finaltext, false) + } + } + + private async getSpreadsheetMetadata(spreadsheetId: string, accessToken: string): Promise { + const url = `https://sheets.googleapis.com/v4/spreadsheets/${spreadsheetId}` + + const response = await fetch(url, { + headers: { + Authorization: `Bearer ${accessToken}`, + 'Content-Type': 'application/json' + } + }) + + if (!response.ok) { + const errorText = await response.text() + throw new Error(`Failed to get spreadsheet metadata: ${response.status} ${response.statusText} - ${errorText}`) + } + + return response.json() + } + + private async getSheetData(spreadsheetId: string, range: string, valueRenderOption: string, accessToken: string): Promise { + const url = `https://sheets.googleapis.com/v4/spreadsheets/${spreadsheetId}/values/${encodeURIComponent(range)}` + const params = new URLSearchParams({ + valueRenderOption, + dateTimeRenderOption: 'FORMATTED_STRING', + majorDimension: 'ROWS' + }) + + const response = await fetch(`${url}?${params}`, { + headers: { + Authorization: `Bearer ${accessToken}`, + 'Content-Type': 'application/json' + } + }) + + if (!response.ok) { + const errorText = await response.text() + throw new Error(`Failed to get sheet data: ${response.status} ${response.statusText} - ${errorText}`) + } + + return response.json() + } + + private convertSheetToDocument( + sheetData: any, + sheetName: string, + spreadsheetId: string, + spreadsheetMetadata: any, + includeHeaders: boolean + ): IDocument { + const values = sheetData.values || [] + + if (values.length === 0) { + return { + pageContent: '', + metadata: { + source: `Google Sheets: ${spreadsheetMetadata.properties?.title 
|| 'Unknown'} - ${sheetName}`, + spreadsheetId, + sheetName, + spreadsheetTitle: spreadsheetMetadata.properties?.title, + range: sheetData.range, + rowCount: 0, + columnCount: 0 + } + } + } + + let headers: string[] = [] + let dataRows: string[][] = [] + + if (includeHeaders && values.length > 0) { + headers = values[0] || [] + dataRows = values.slice(1) + } else { + // Generate default headers like A, B, C, etc. + const maxColumns = Math.max(...values.map((row: any[]) => row.length)) + headers = Array.from({ length: maxColumns }, (_, i) => String.fromCharCode(65 + i)) + dataRows = values + } + + // Convert to markdown table format + let content = '' + + if (headers.length > 0) { + // Create header row + content += '| ' + headers.join(' | ') + ' |\n' + // Create separator row + content += '| ' + headers.map(() => '---').join(' | ') + ' |\n' + + // Add data rows + for (const row of dataRows) { + const paddedRow = [...row] + // Pad row to match header length + while (paddedRow.length < headers.length) { + paddedRow.push('') + } + content += '| ' + paddedRow.join(' | ') + ' |\n' + } + } + + return { + pageContent: content, + metadata: { + source: `Google Sheets: ${spreadsheetMetadata.properties?.title || 'Unknown'} - ${sheetName}`, + spreadsheetId, + sheetName, + spreadsheetTitle: spreadsheetMetadata.properties?.title, + spreadsheetUrl: `https://docs.google.com/spreadsheets/d/${spreadsheetId}`, + range: sheetData.range, + rowCount: values.length, + columnCount: headers.length, + headers: includeHeaders ? 
headers : undefined, + totalDataRows: dataRows.length + } + } + } +} + +module.exports = { nodeClass: GoogleSheets_DocumentLoaders } diff --git a/packages/components/nodes/documentloaders/GoogleSheets/google-sheets.svg b/packages/components/nodes/documentloaders/GoogleSheets/google-sheets.svg new file mode 100644 index 000000000..43af0ccf1 --- /dev/null +++ b/packages/components/nodes/documentloaders/GoogleSheets/google-sheets.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/packages/components/nodes/documentloaders/Jira/jira.svg b/packages/components/nodes/documentloaders/Jira/jira.svg index 807c5a311..4ace5cc84 100644 --- a/packages/components/nodes/documentloaders/Jira/jira.svg +++ b/packages/components/nodes/documentloaders/Jira/jira.svg @@ -1,2 +1 @@ - - \ No newline at end of file + \ No newline at end of file diff --git a/packages/components/nodes/documentloaders/Json/Json.ts b/packages/components/nodes/documentloaders/Json/Json.ts index ee07f1369..042c81ef8 100644 --- a/packages/components/nodes/documentloaders/Json/Json.ts +++ b/packages/components/nodes/documentloaders/Json/Json.ts @@ -47,7 +47,7 @@ class Json_DocumentLoaders implements INode { constructor() { this.label = 'Json File' this.name = 'jsonFile' - this.version = 3.0 + this.version = 3.1 this.type = 'Document' this.icon = 'json.svg' this.category = 'Document Loaders' @@ -66,6 +66,14 @@ class Json_DocumentLoaders implements INode { type: 'TextSplitter', optional: true }, + { + label: 'Separate by JSON Object (JSON Array)', + name: 'separateByObject', + type: 'boolean', + description: 'If enabled and the file is a JSON Array, each JSON object will be extracted as a chunk', + optional: true, + additionalParams: true + }, { label: 'Pointers Extraction (separated by commas)', name: 'pointersName', @@ -73,7 +81,10 @@ class Json_DocumentLoaders implements INode { description: 'Ex: { "key": "value" }, Pointer Extraction = "key", "value" will be extracted as pageContent of the chunk. 
Use comma to separate multiple pointers', placeholder: 'key1, key2', - optional: true + optional: true, + hide: { + separateByObject: true + } }, { label: 'Additional Metadata', @@ -122,6 +133,7 @@ class Json_DocumentLoaders implements INode { const pointersName = nodeData.inputs?.pointersName as string const metadata = nodeData.inputs?.metadata const _omitMetadataKeys = nodeData.inputs?.omitMetadataKeys as string + const separateByObject = nodeData.inputs?.separateByObject as boolean const output = nodeData.outputs?.output as string let omitMetadataKeys: string[] = [] @@ -146,13 +158,14 @@ class Json_DocumentLoaders implements INode { } else { files = [fileName] } + const orgId = options.orgId const chatflowid = options.chatflowid for (const file of files) { if (!file) continue - const fileData = await getFileFromStorage(file, chatflowid) + const fileData = await getFileFromStorage(file, orgId, chatflowid) const blob = new Blob([fileData]) - const loader = new JSONLoader(blob, pointers.length != 0 ? pointers : undefined, metadata) + const loader = new JSONLoader(blob, pointers.length != 0 ? pointers : undefined, metadata, separateByObject) if (textSplitter) { let splittedDocs = await loader.load() @@ -175,7 +188,7 @@ class Json_DocumentLoaders implements INode { splitDataURI.pop() const bf = Buffer.from(splitDataURI.pop() || '', 'base64') const blob = new Blob([bf]) - const loader = new JSONLoader(blob, pointers.length != 0 ? pointers : undefined, metadata) + const loader = new JSONLoader(blob, pointers.length != 0 ? 
pointers : undefined, metadata, separateByObject) if (textSplitter) { let splittedDocs = await loader.load() @@ -305,13 +318,20 @@ class TextLoader extends BaseDocumentLoader { class JSONLoader extends TextLoader { public pointers: string[] private metadataMapping: Record + private separateByObject: boolean - constructor(filePathOrBlob: string | Blob, pointers: string | string[] = [], metadataMapping: Record = {}) { + constructor( + filePathOrBlob: string | Blob, + pointers: string | string[] = [], + metadataMapping: Record = {}, + separateByObject: boolean = false + ) { super(filePathOrBlob) this.pointers = Array.isArray(pointers) ? pointers : [pointers] if (metadataMapping) { this.metadataMapping = typeof metadataMapping === 'object' ? metadataMapping : JSON.parse(metadataMapping) } + this.separateByObject = separateByObject } protected async parse(raw: string): Promise { @@ -322,14 +342,24 @@ class JSONLoader extends TextLoader { const jsonArray = Array.isArray(json) ? json : [json] for (const item of jsonArray) { - const content = this.extractContent(item) - const metadata = this.extractMetadata(item) - - for (const pageContent of content) { - documents.push({ - pageContent, - metadata - }) + if (this.separateByObject) { + if (typeof item === 'object' && item !== null && !Array.isArray(item)) { + const metadata = this.extractMetadata(item) + const pageContent = this.formatObjectAsKeyValue(item) + documents.push({ + pageContent, + metadata + }) + } + } else { + const content = this.extractContent(item) + const metadata = this.extractMetadata(item) + for (const pageContent of content) { + documents.push({ + pageContent, + metadata + }) + } } } @@ -369,6 +399,30 @@ class JSONLoader extends TextLoader { return metadata } + /** + * Formats a JSON object as readable key-value pairs + */ + private formatObjectAsKeyValue(obj: any, prefix: string = ''): string { + const lines: string[] = [] + + for (const [key, value] of Object.entries(obj)) { + const fullKey = prefix ? 
`${prefix}.${key}` : key + + if (value === null || value === undefined) { + lines.push(`${fullKey}: ${value}`) + } else if (Array.isArray(value)) { + lines.push(`${fullKey}: ${JSON.stringify(value)}`) + } else if (typeof value === 'object') { + // Recursively format nested objects + lines.push(this.formatObjectAsKeyValue(value, fullKey)) + } else { + lines.push(`${fullKey}: ${value}`) + } + } + + return lines.join('\n') + } + /** * If JSON pointers are specified, return all strings below any of them * and exclude all other nodes expect if they match a JSON pointer. diff --git a/packages/components/nodes/documentloaders/Jsonlines/Jsonlines.ts b/packages/components/nodes/documentloaders/Jsonlines/Jsonlines.ts index fe345e00d..2b035dcfc 100644 --- a/packages/components/nodes/documentloaders/Jsonlines/Jsonlines.ts +++ b/packages/components/nodes/documentloaders/Jsonlines/Jsonlines.ts @@ -135,11 +135,12 @@ class Jsonlines_DocumentLoaders implements INode { } else { files = [fileName] } + const orgId = options.orgId const chatflowid = options.chatflowid for (const file of files) { if (!file) continue - const fileData = await getFileFromStorage(file, chatflowid) + const fileData = await getFileFromStorage(file, orgId, chatflowid) const blob = new Blob([fileData]) const loader = new JSONLinesLoader(blob, pointer, metadata) diff --git a/packages/components/nodes/documentloaders/MicrosoftExcel/ExcelLoader.ts b/packages/components/nodes/documentloaders/MicrosoftExcel/ExcelLoader.ts new file mode 100644 index 000000000..1e4889e1c --- /dev/null +++ b/packages/components/nodes/documentloaders/MicrosoftExcel/ExcelLoader.ts @@ -0,0 +1,72 @@ +import { Document } from '@langchain/core/documents' +import { BufferLoader } from 'langchain/document_loaders/fs/buffer' +import { read, utils } from 'xlsx' + +/** + * Document loader that uses SheetJS to load documents. 
+ * + * Each worksheet is parsed into an array of row objects using the SheetJS + * `sheet_to_json` method and projected to a `Document`. Metadata includes + * original sheet name, row data, and row index + */ +export class LoadOfSheet extends BufferLoader { + attributes: { name: string; description: string; type: string }[] = [] + + constructor(filePathOrBlob: string | Blob) { + super(filePathOrBlob) + this.attributes = [] + } + + /** + * Parse document + * + * NOTE: column labels in multiple sheets are not disambiguated! + * + * @param raw Raw data Buffer + * @param metadata Document metadata + * @returns Array of Documents + */ + async parse(raw: Buffer, metadata: Document['metadata']): Promise { + const result: Document[] = [] + + this.attributes = [ + { name: 'worksheet', description: 'Sheet or Worksheet Name', type: 'string' }, + { name: 'rowNum', description: 'Row index', type: 'number' } + ] + + const wb = read(raw, { type: 'buffer' }) + for (let name of wb.SheetNames) { + const fields: Record> = {} + const ws = wb.Sheets[name] + if (!ws) continue + + const aoo = utils.sheet_to_json(ws) as Record[] + aoo.forEach((row) => { + result.push({ + pageContent: + Object.entries(row) + .map((kv) => `- ${kv[0]}: ${kv[1]}`) + .join('\n') + '\n', + metadata: { + worksheet: name, + rowNum: row['__rowNum__'], + ...metadata, + ...row + } + }) + Object.entries(row).forEach(([k, v]) => { + if (v != null) (fields[k] || (fields[k] = {}))[v instanceof Date ? 
'date' : typeof v] = true + }) + }) + Object.entries(fields).forEach(([k, v]) => + this.attributes.push({ + name: k, + description: k, + type: Object.keys(v).join(' or ') + }) + ) + } + + return result + } +} diff --git a/packages/components/nodes/documentloaders/MicrosoftExcel/MicrosoftExcel.ts b/packages/components/nodes/documentloaders/MicrosoftExcel/MicrosoftExcel.ts new file mode 100644 index 000000000..468d16731 --- /dev/null +++ b/packages/components/nodes/documentloaders/MicrosoftExcel/MicrosoftExcel.ts @@ -0,0 +1,142 @@ +import { TextSplitter } from 'langchain/text_splitter' +import { LoadOfSheet } from './ExcelLoader' +import { getFileFromStorage, handleDocumentLoaderDocuments, handleDocumentLoaderMetadata, handleDocumentLoaderOutput } from '../../../src' +import { ICommonObject, IDocument, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface' + +class MicrosoftExcel_DocumentLoaders implements INode { + label: string + name: string + version: number + description: string + type: string + icon: string + category: string + baseClasses: string[] + inputs: INodeParams[] + outputs: INodeOutputsValue[] + + constructor() { + this.label = 'Microsoft Excel' + this.name = 'microsoftExcel' + this.version = 1.0 + this.type = 'Document' + this.icon = 'excel.svg' + this.category = 'Document Loaders' + this.description = `Load data from Microsoft Excel files` + this.baseClasses = [this.type] + this.inputs = [ + { + label: 'Excel File', + name: 'excelFile', + type: 'file', + fileType: '.xlsx, .xls, .xlsm, .xlsb' + }, + { + label: 'Text Splitter', + name: 'textSplitter', + type: 'TextSplitter', + optional: true + }, + { + label: 'Additional Metadata', + name: 'metadata', + type: 'json', + description: 'Additional metadata to be added to the extracted documents', + optional: true, + additionalParams: true + }, + { + label: 'Omit Metadata Keys', + name: 'omitMetadataKeys', + type: 'string', + rows: 4, + description: + 'Each document loader comes 
with a default set of metadata keys that are extracted from the document. You can use this field to omit some of the default metadata keys. The value should be a list of keys, separated by comma. Use * to omit all metadata keys except the ones you specify in the Additional Metadata field', + placeholder: 'key1, key2, key3.nestedKey1', + optional: true, + additionalParams: true + } + ] + this.outputs = [ + { + label: 'Document', + name: 'document', + description: 'Array of document objects containing metadata and pageContent', + baseClasses: [...this.baseClasses, 'json'] + }, + { + label: 'Text', + name: 'text', + description: 'Concatenated string from pageContent of documents', + baseClasses: ['string', 'json'] + } + ] + } + + getFiles(nodeData: INodeData) { + const excelFileBase64 = nodeData.inputs?.excelFile as string + + let files: string[] = [] + let fromStorage: boolean = true + + if (excelFileBase64.startsWith('FILE-STORAGE::')) { + const fileName = excelFileBase64.replace('FILE-STORAGE::', '') + if (fileName.startsWith('[') && fileName.endsWith(']')) { + files = JSON.parse(fileName) + } else { + files = [fileName] + } + } else { + if (excelFileBase64.startsWith('[') && excelFileBase64.endsWith(']')) { + files = JSON.parse(excelFileBase64) + } else { + files = [excelFileBase64] + } + + fromStorage = false + } + + return { files, fromStorage } + } + + async getFileData(file: string, { orgId, chatflowid }: { orgId: string; chatflowid: string }, fromStorage?: boolean) { + if (fromStorage) { + return getFileFromStorage(file, orgId, chatflowid) + } else { + const splitDataURI = file.split(',') + splitDataURI.pop() + return Buffer.from(splitDataURI.pop() || '', 'base64') + } + } + + async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { + const textSplitter = nodeData.inputs?.textSplitter as TextSplitter + const metadata = nodeData.inputs?.metadata + const output = nodeData.outputs?.output as string + const _omitMetadataKeys = 
nodeData.inputs?.omitMetadataKeys as string + + let docs: IDocument[] = [] + + const orgId = options.orgId + const chatflowid = options.chatflowid + + const { files, fromStorage } = this.getFiles(nodeData) + + for (const file of files) { + if (!file) continue + + const fileData = await this.getFileData(file, { orgId, chatflowid }, fromStorage) + const blob = new Blob([fileData]) + const loader = new LoadOfSheet(blob) + + // use spread instead of push, because it raises RangeError: Maximum call stack size exceeded when too many docs + docs = [...docs, ...(await handleDocumentLoaderDocuments(loader, textSplitter))] + } + + docs = handleDocumentLoaderMetadata(docs, _omitMetadataKeys, metadata) + + return handleDocumentLoaderOutput(docs, output) + } +} + +module.exports = { nodeClass: MicrosoftExcel_DocumentLoaders } diff --git a/packages/components/nodes/documentloaders/MicrosoftExcel/excel.svg b/packages/components/nodes/documentloaders/MicrosoftExcel/excel.svg new file mode 100644 index 000000000..22d8f9497 --- /dev/null +++ b/packages/components/nodes/documentloaders/MicrosoftExcel/excel.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/packages/components/nodes/documentloaders/MicrosoftPowerpoint/MicrosoftPowerpoint.ts b/packages/components/nodes/documentloaders/MicrosoftPowerpoint/MicrosoftPowerpoint.ts new file mode 100644 index 000000000..bca5e9a5b --- /dev/null +++ b/packages/components/nodes/documentloaders/MicrosoftPowerpoint/MicrosoftPowerpoint.ts @@ -0,0 +1,142 @@ +import { TextSplitter } from 'langchain/text_splitter' +import { PowerpointLoader } from './PowerpointLoader' +import { getFileFromStorage, handleDocumentLoaderDocuments, handleDocumentLoaderMetadata, handleDocumentLoaderOutput } from '../../../src' +import { ICommonObject, IDocument, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface' + +class MicrosoftPowerpoint_DocumentLoaders implements INode { + label: string + name: string + version: number + 
description: string + type: string + icon: string + category: string + baseClasses: string[] + inputs: INodeParams[] + outputs: INodeOutputsValue[] + + constructor() { + this.label = 'Microsoft PowerPoint' + this.name = 'microsoftPowerpoint' + this.version = 1.0 + this.type = 'Document' + this.icon = 'powerpoint.svg' + this.category = 'Document Loaders' + this.description = `Load data from Microsoft PowerPoint files` + this.baseClasses = [this.type] + this.inputs = [ + { + label: 'PowerPoint File', + name: 'powerpointFile', + type: 'file', + fileType: '.pptx, .ppt' + }, + { + label: 'Text Splitter', + name: 'textSplitter', + type: 'TextSplitter', + optional: true + }, + { + label: 'Additional Metadata', + name: 'metadata', + type: 'json', + description: 'Additional metadata to be added to the extracted documents', + optional: true, + additionalParams: true + }, + { + label: 'Omit Metadata Keys', + name: 'omitMetadataKeys', + type: 'string', + rows: 4, + description: + 'Each document loader comes with a default set of metadata keys that are extracted from the document. You can use this field to omit some of the default metadata keys. The value should be a list of keys, seperated by comma. 
Use * to omit all metadata keys execept the ones you specify in the Additional Metadata field', + placeholder: 'key1, key2, key3.nestedKey1', + optional: true, + additionalParams: true + } + ] + this.outputs = [ + { + label: 'Document', + name: 'document', + description: 'Array of document objects containing metadata and pageContent', + baseClasses: [...this.baseClasses, 'json'] + }, + { + label: 'Text', + name: 'text', + description: 'Concatenated string from pageContent of documents', + baseClasses: ['string', 'json'] + } + ] + } + + getFiles(nodeData: INodeData) { + const powerpointFileBase64 = nodeData.inputs?.powerpointFile as string + + let files: string[] = [] + let fromStorage: boolean = true + + if (powerpointFileBase64.startsWith('FILE-STORAGE::')) { + const fileName = powerpointFileBase64.replace('FILE-STORAGE::', '') + if (fileName.startsWith('[') && fileName.endsWith(']')) { + files = JSON.parse(fileName) + } else { + files = [fileName] + } + } else { + if (powerpointFileBase64.startsWith('[') && powerpointFileBase64.endsWith(']')) { + files = JSON.parse(powerpointFileBase64) + } else { + files = [powerpointFileBase64] + } + + fromStorage = false + } + + return { files, fromStorage } + } + + async getFileData(file: string, { orgId, chatflowid }: { orgId: string; chatflowid: string }, fromStorage?: boolean) { + if (fromStorage) { + return getFileFromStorage(file, orgId, chatflowid) + } else { + const splitDataURI = file.split(',') + splitDataURI.pop() + return Buffer.from(splitDataURI.pop() || '', 'base64') + } + } + + async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { + const textSplitter = nodeData.inputs?.textSplitter as TextSplitter + const metadata = nodeData.inputs?.metadata + const output = nodeData.outputs?.output as string + const _omitMetadataKeys = nodeData.inputs?.omitMetadataKeys as string + + let docs: IDocument[] = [] + + const orgId = options.orgId + const chatflowid = options.chatflowid + + const { files, 
fromStorage } = this.getFiles(nodeData) + + for (const file of files) { + if (!file) continue + + const fileData = await this.getFileData(file, { orgId, chatflowid }, fromStorage) + const blob = new Blob([fileData]) + const loader = new PowerpointLoader(blob) + + // use spread instead of push, because it raises RangeError: Maximum call stack size exceeded when too many docs + docs = [...docs, ...(await handleDocumentLoaderDocuments(loader, textSplitter))] + } + + docs = handleDocumentLoaderMetadata(docs, _omitMetadataKeys, metadata) + + return handleDocumentLoaderOutput(docs, output) + } +} + +module.exports = { nodeClass: MicrosoftPowerpoint_DocumentLoaders } diff --git a/packages/components/nodes/documentloaders/MicrosoftPowerpoint/PowerpointLoader.ts b/packages/components/nodes/documentloaders/MicrosoftPowerpoint/PowerpointLoader.ts new file mode 100644 index 000000000..97f266826 --- /dev/null +++ b/packages/components/nodes/documentloaders/MicrosoftPowerpoint/PowerpointLoader.ts @@ -0,0 +1,101 @@ +import { Document } from '@langchain/core/documents' +import { BufferLoader } from 'langchain/document_loaders/fs/buffer' +import { parseOfficeAsync } from 'officeparser' + +/** + * Document loader that uses officeparser to load PowerPoint documents. + * + * Each slide is parsed into a separate Document with metadata including + * slide number and extracted text content. 
+ */ +export class PowerpointLoader extends BufferLoader { + attributes: { name: string; description: string; type: string }[] = [] + + constructor(filePathOrBlob: string | Blob) { + super(filePathOrBlob) + this.attributes = [] + } + + /** + * Parse PowerPoint document + * + * @param raw Raw data Buffer + * @param metadata Document metadata + * @returns Array of Documents + */ + async parse(raw: Buffer, metadata: Document['metadata']): Promise { + const result: Document[] = [] + + this.attributes = [ + { name: 'slideNumber', description: 'Slide number', type: 'number' }, + { name: 'documentType', description: 'Type of document', type: 'string' } + ] + + try { + // Use officeparser to extract text from PowerPoint + const data = await parseOfficeAsync(raw) + + if (typeof data === 'string' && data.trim()) { + // Split content by common slide separators or use the entire content as one document + const slides = this.splitIntoSlides(data) + + slides.forEach((slideContent, index) => { + if (slideContent.trim()) { + result.push({ + pageContent: slideContent.trim(), + metadata: { + slideNumber: index + 1, + documentType: 'powerpoint', + ...metadata + } + }) + } + }) + } + } catch (error) { + console.error('Error parsing PowerPoint file:', error) + throw new Error(`Failed to parse PowerPoint file: ${error instanceof Error ? 
error.message : 'Unknown error'}`) + } + + return result + } + + /** + * Split content into slides based on common patterns + * This is a heuristic approach since officeparser returns plain text + */ + private splitIntoSlides(content: string): string[] { + // Try to split by common slide patterns + const slidePatterns = [ + /\n\s*Slide\s+\d+/gi, + /\n\s*Page\s+\d+/gi, + /\n\s*\d+\s*\/\s*\d+/gi, + /\n\s*_{3,}/g, // Underscores as separators + /\n\s*-{3,}/g // Dashes as separators + ] + + let slides: string[] = [] + + // Try each pattern and use the one that creates the most reasonable splits + for (const pattern of slidePatterns) { + const potentialSlides = content.split(pattern) + if (potentialSlides.length > 1 && potentialSlides.length < 100) { + // Reasonable number of slides + slides = potentialSlides + break + } + } + + // If no good pattern found, split by double newlines as a fallback + if (slides.length === 0) { + slides = content.split(/\n\s*\n\s*\n/) + } + + // If still no good split, treat entire content as one slide + if (slides.length === 0 || slides.every((slide) => slide.trim().length < 10)) { + slides = [content] + } + + return slides.filter((slide) => slide.trim().length > 0) + } +} diff --git a/packages/components/nodes/documentloaders/MicrosoftPowerpoint/powerpoint.svg b/packages/components/nodes/documentloaders/MicrosoftPowerpoint/powerpoint.svg new file mode 100644 index 000000000..4d2f7b2a1 --- /dev/null +++ b/packages/components/nodes/documentloaders/MicrosoftPowerpoint/powerpoint.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/packages/components/nodes/documentloaders/MicrosoftWord/MicrosoftWord.ts b/packages/components/nodes/documentloaders/MicrosoftWord/MicrosoftWord.ts new file mode 100644 index 000000000..7d74af259 --- /dev/null +++ b/packages/components/nodes/documentloaders/MicrosoftWord/MicrosoftWord.ts @@ -0,0 +1,142 @@ +import { TextSplitter } from 'langchain/text_splitter' +import { WordLoader } from './WordLoader' 
+import { getFileFromStorage, handleDocumentLoaderDocuments, handleDocumentLoaderMetadata, handleDocumentLoaderOutput } from '../../../src' +import { ICommonObject, IDocument, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface' + +class MicrosoftWord_DocumentLoaders implements INode { + label: string + name: string + version: number + description: string + type: string + icon: string + category: string + baseClasses: string[] + inputs: INodeParams[] + outputs: INodeOutputsValue[] + + constructor() { + this.label = 'Microsoft Word' + this.name = 'microsoftWord' + this.version = 1.0 + this.type = 'Document' + this.icon = 'word.svg' + this.category = 'Document Loaders' + this.description = `Load data from Microsoft Word files` + this.baseClasses = [this.type] + this.inputs = [ + { + label: 'Word File', + name: 'docxFile', + type: 'file', + fileType: '.docx, .doc' + }, + { + label: 'Text Splitter', + name: 'textSplitter', + type: 'TextSplitter', + optional: true + }, + { + label: 'Additional Metadata', + name: 'metadata', + type: 'json', + description: 'Additional metadata to be added to the extracted documents', + optional: true, + additionalParams: true + }, + { + label: 'Omit Metadata Keys', + name: 'omitMetadataKeys', + type: 'string', + rows: 4, + description: + 'Each document loader comes with a default set of metadata keys that are extracted from the document. You can use this field to omit some of the default metadata keys. The value should be a list of keys, seperated by comma. 
Use * to omit all metadata keys execept the ones you specify in the Additional Metadata field', + placeholder: 'key1, key2, key3.nestedKey1', + optional: true, + additionalParams: true + } + ] + this.outputs = [ + { + label: 'Document', + name: 'document', + description: 'Array of document objects containing metadata and pageContent', + baseClasses: [...this.baseClasses, 'json'] + }, + { + label: 'Text', + name: 'text', + description: 'Concatenated string from pageContent of documents', + baseClasses: ['string', 'json'] + } + ] + } + + getFiles(nodeData: INodeData) { + const docxFileBase64 = nodeData.inputs?.docxFile as string + + let files: string[] = [] + let fromStorage: boolean = true + + if (docxFileBase64.startsWith('FILE-STORAGE::')) { + const fileName = docxFileBase64.replace('FILE-STORAGE::', '') + if (fileName.startsWith('[') && fileName.endsWith(']')) { + files = JSON.parse(fileName) + } else { + files = [fileName] + } + } else { + if (docxFileBase64.startsWith('[') && docxFileBase64.endsWith(']')) { + files = JSON.parse(docxFileBase64) + } else { + files = [docxFileBase64] + } + + fromStorage = false + } + + return { files, fromStorage } + } + + async getFileData(file: string, { orgId, chatflowid }: { orgId: string; chatflowid: string }, fromStorage?: boolean) { + if (fromStorage) { + return getFileFromStorage(file, orgId, chatflowid) + } else { + const splitDataURI = file.split(',') + splitDataURI.pop() + return Buffer.from(splitDataURI.pop() || '', 'base64') + } + } + + async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { + const textSplitter = nodeData.inputs?.textSplitter as TextSplitter + const metadata = nodeData.inputs?.metadata + const output = nodeData.outputs?.output as string + const _omitMetadataKeys = nodeData.inputs?.omitMetadataKeys as string + + let docs: IDocument[] = [] + + const orgId = options.orgId + const chatflowid = options.chatflowid + + const { files, fromStorage } = this.getFiles(nodeData) + + for 
(const file of files) { + if (!file) continue + + const fileData = await this.getFileData(file, { orgId, chatflowid }, fromStorage) + const blob = new Blob([fileData]) + const loader = new WordLoader(blob) + + // use spread instead of push, because it raises RangeError: Maximum call stack size exceeded when too many docs + docs = [...docs, ...(await handleDocumentLoaderDocuments(loader, textSplitter))] + } + + docs = handleDocumentLoaderMetadata(docs, _omitMetadataKeys, metadata) + + return handleDocumentLoaderOutput(docs, output) + } +} + +module.exports = { nodeClass: MicrosoftWord_DocumentLoaders } diff --git a/packages/components/nodes/documentloaders/MicrosoftWord/WordLoader.ts b/packages/components/nodes/documentloaders/MicrosoftWord/WordLoader.ts new file mode 100644 index 000000000..640e2c4ff --- /dev/null +++ b/packages/components/nodes/documentloaders/MicrosoftWord/WordLoader.ts @@ -0,0 +1,108 @@ +import { Document } from '@langchain/core/documents' +import { BufferLoader } from 'langchain/document_loaders/fs/buffer' +import { parseOfficeAsync } from 'officeparser' + +/** + * Document loader that uses officeparser to load Word documents. + * + * The document is parsed into a single Document with metadata including + * document type and extracted text content. 
+ */ +export class WordLoader extends BufferLoader { + attributes: { name: string; description: string; type: string }[] = [] + + constructor(filePathOrBlob: string | Blob) { + super(filePathOrBlob) + this.attributes = [] + } + + /** + * Parse Word document + * + * @param raw Raw data Buffer + * @param metadata Document metadata + * @returns Array of Documents + */ + async parse(raw: Buffer, metadata: Document['metadata']): Promise { + const result: Document[] = [] + + this.attributes = [ + { name: 'documentType', description: 'Type of document', type: 'string' }, + { name: 'pageCount', description: 'Number of pages/sections', type: 'number' } + ] + + try { + // Use officeparser to extract text from Word document + const data = await parseOfficeAsync(raw) + + if (typeof data === 'string' && data.trim()) { + // Split content by common page/section separators + const sections = this.splitIntoSections(data) + + sections.forEach((sectionContent, index) => { + if (sectionContent.trim()) { + result.push({ + pageContent: sectionContent.trim(), + metadata: { + documentType: 'word', + pageNumber: index + 1, + ...metadata + } + }) + } + }) + } + } catch (error) { + console.error('Error parsing Word file:', error) + throw new Error(`Failed to parse Word file: ${error instanceof Error ? error.message : 'Unknown error'}`) + } + + return result + } + + /** + * Split content into sections based on common patterns + * This is a heuristic approach since officeparser returns plain text + */ + private splitIntoSections(content: string): string[] { + // Try to split by common section patterns + const sectionPatterns = [ + /\n\s*Page\s+\d+/gi, + /\n\s*Section\s+\d+/gi, + /\n\s*Chapter\s+\d+/gi, + /\n\s*\d+\.\s+/gi, // Numbered sections like "1. ", "2. 
" + /\n\s*[A-Z][A-Z\s]{2,}\n/g, // ALL CAPS headings + /\n\s*_{5,}/g, // Long underscores as separators + /\n\s*-{5,}/g // Long dashes as separators + ] + + let sections: string[] = [] + + // Try each pattern and use the one that creates the most reasonable splits + for (const pattern of sectionPatterns) { + const potentialSections = content.split(pattern) + if (potentialSections.length > 1 && potentialSections.length < 50) { + // Reasonable number of sections + sections = potentialSections + break + } + } + + // If no good pattern found, split by multiple newlines as a fallback + if (sections.length === 0) { + sections = content.split(/\n\s*\n\s*\n\s*\n/) + } + + // If still no good split, split by double newlines + if (sections.length === 0 || sections.every((section) => section.trim().length < 20)) { + sections = content.split(/\n\s*\n\s*\n/) + } + + // If still no good split, treat entire content as one section + if (sections.length === 0 || sections.every((section) => section.trim().length < 10)) { + sections = [content] + } + + return sections.filter((section) => section.trim().length > 0) + } +} diff --git a/packages/components/nodes/documentloaders/MicrosoftWord/word.svg b/packages/components/nodes/documentloaders/MicrosoftWord/word.svg new file mode 100644 index 000000000..dabac0ea5 --- /dev/null +++ b/packages/components/nodes/documentloaders/MicrosoftWord/word.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/packages/components/nodes/documentloaders/Oxylabs/Oxylabs.ts b/packages/components/nodes/documentloaders/Oxylabs/Oxylabs.ts new file mode 100644 index 000000000..c036eb7bf --- /dev/null +++ b/packages/components/nodes/documentloaders/Oxylabs/Oxylabs.ts @@ -0,0 +1,336 @@ +import { TextSplitter } from 'langchain/text_splitter' +import { DocumentInterface } from '@langchain/core/documents' +import { BaseDocumentLoader } from 'langchain/document_loaders/base' +import { INode, INodeData, INodeParams, ICommonObject, INodeOutputsValue } from 
'../../../src/Interface' +import { getCredentialData, getCredentialParam, handleEscapeCharacters } from '../../../src/utils' +import axios, { AxiosResponse } from 'axios' + +interface OxylabsDocument extends DocumentInterface {} + +interface OxylabsResponse { + results: Result[] + job: Job +} + +interface Result { + content: any + created_at: string + updated_at: string + page: number + url: string + job_id: string + is_render_forced: boolean + status_code: number + parser_type: string +} + +interface Job { + callback_url: string + client_id: number + context: any + created_at: string + domain: string + geo_location: any + id: string + limit: number + locale: any + pages: number + parse: boolean + parser_type: any + parser_preset: any + parsing_instructions: any + browser_instructions: any + render: any + url: any + query: string + source: string + start_page: number + status: string + storage_type: any + storage_url: any + subdomain: string + content_encoding: string + updated_at: string + user_agent_type: string + is_premium_domain: boolean +} + +interface OxylabsLoaderParameters { + username: string + password: string + query: string + source: string + geo_location: string + render: boolean + parse: boolean + user_agent_type: string +} + +export class OxylabsLoader extends BaseDocumentLoader { + private params: OxylabsLoaderParameters + + constructor(loaderParams: OxylabsLoaderParameters) { + super() + this.params = loaderParams + } + + private async sendAPIRequest(params: any): Promise> { + params = Object.fromEntries(Object.entries(params).filter(([_, value]) => value !== null && value !== '' && value !== undefined)) + + const auth = Buffer.from(`${this.params.username}:${this.params.password}`).toString('base64') + + const response = await axios.post('https://realtime.oxylabs.io/v1/queries', params, { + headers: { + 'Content-Type': 'application/json', + 'x-oxylabs-sdk': 'oxylabs-integration-flowise/1.0.0 (1.0.0; 64bit)', + Authorization: `Basic ${auth}` + } + 
}) + + if (response.status >= 400) { + throw new Error(`Oxylabs: Failed to call Oxylabs API: ${response.status}`) + } + + return response + } + + public async load(): Promise { + let isUrlSource = this.params.source == 'universal' + + const params = { + source: this.params.source, + geo_location: this.params.geo_location, + render: this.params.render ? 'html' : null, + parse: this.params.parse, + user_agent_type: this.params.user_agent_type, + markdown: !this.params.parse, + url: isUrlSource ? this.params.query : null, + query: !isUrlSource ? this.params.query : null + } + + const response = await this.sendAPIRequest(params) + + const docs: OxylabsDocument[] = response.data.results.map((result, index) => { + const content = typeof result.content === 'string' ? result.content : JSON.stringify(result.content) + return { + id: `${response.data.job.id.toString()}-${index}`, + pageContent: content, + metadata: {} + } + }) + + return docs + } +} + +class Oxylabs_DocumentLoaders implements INode { + label: string + name: string + description: string + type: string + icon: string + version: number + category: string + baseClasses: string[] + inputs: INodeParams[] + credential: INodeParams + outputs: INodeOutputsValue[] + + constructor() { + this.label = 'Oxylabs' + this.name = 'oxylabs' + this.type = 'Document' + this.icon = 'oxylabs.svg' + this.version = 1.0 + this.category = 'Document Loaders' + this.description = 'Extract data from URLs using Oxylabs' + this.baseClasses = [this.type] + this.credential = { + label: 'Oxylabs API', + name: 'credential', + type: 'credential', + credentialNames: ['oxylabsApi'] + } + this.inputs = [ + { + label: 'Text Splitter', + name: 'textSplitter', + type: 'TextSplitter', + optional: false + }, + { + label: 'Query', + name: 'query', + type: 'string', + description: 'Website URL of query keyword.' 
+ }, + { + label: 'Source', + name: 'source', + type: 'options', + description: 'Target website to scrape.', + options: [ + { + label: 'Universal', + name: 'universal' + }, + { + label: 'Google Search', + name: 'google_search' + }, + { + label: 'Amazon Product', + name: 'amazon_product' + }, + { + label: 'Amazon Search', + name: 'amazon_search' + } + ], + default: 'universal' + }, + { + label: 'Geolocation', + name: 'geo_location', + type: 'string', + description: "Sets the proxy's geo location to retrieve data. Check Oxylabs documentation for more details.", + optional: true + }, + { + label: 'Render', + name: 'render', + type: 'boolean', + description: 'Enables JavaScript rendering when set to true.', + optional: true, + default: false + }, + { + label: 'Parse', + name: 'parse', + type: 'boolean', + description: + "Returns parsed data when set to true, as long as a dedicated parser exists for the submitted URL's page type.", + optional: true, + default: false + }, + { + label: 'User Agent Type', + name: 'user_agent_type', + type: 'options', + description: 'Device type and browser.', + options: [ + { + label: 'Desktop', + name: 'desktop' + }, + { + label: 'Desktop Chrome', + name: 'desktop_chrome' + }, + { + label: 'Desktop Edge', + name: 'desktop_edge' + }, + { + label: 'Desktop Firefox', + name: 'desktop_firefox' + }, + { + label: 'Desktop Opera', + name: 'desktop_opera' + }, + { + label: 'Desktop Safari', + name: 'desktop_safari' + }, + { + label: 'Mobile', + name: 'mobile' + }, + { + label: 'Mobile Android', + name: 'mobile_android' + }, + { + label: 'Mobile iOS', + name: 'mobile_ios' + }, + { + label: 'Tablet', + name: 'tablet' + }, + { + label: 'Tablet Android', + name: 'tablet_android' + }, + { + label: 'Tablet iOS', + name: 'tablet_ios' + } + ], + optional: true + } + ] + this.outputs = [ + { + label: 'Document', + name: 'document', + description: 'Array of document objects containing metadata and pageContent', + baseClasses: [...this.baseClasses, 'json'] 
+ }, + { + label: 'Text', + name: 'text', + description: 'Concatenated string from pageContent of documents', + baseClasses: ['string', 'json'] + } + ] + } + + async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { + const query = nodeData.inputs?.query as string + const textSplitter = nodeData.inputs?.textSplitter as TextSplitter + const source = nodeData.inputs?.source as string + const geo_location = nodeData.inputs?.geo_location as string + const render = nodeData.inputs?.render as boolean + const parse = nodeData.inputs?.parse as boolean + const user_agent_type = nodeData.inputs?.user_agent_type as string + + const credentialData = await getCredentialData(nodeData.credential ?? '', options) + const username = getCredentialParam('username', credentialData, nodeData) + const password = getCredentialParam('password', credentialData, nodeData) + + const output = nodeData.outputs?.output as string + + const input: OxylabsLoaderParameters = { + username, + password, + query, + source, + geo_location, + render, + parse, + user_agent_type + } + + const loader = new OxylabsLoader(input) + + let docs: OxylabsDocument[] = await loader.load() + + if (textSplitter && docs.length > 0) { + docs = await textSplitter.splitDocuments(docs) + } + + if (output === 'document') { + return docs + } else { + let finaltext = '' + for (const doc of docs) { + finaltext += `${doc.pageContent}\n` + } + return handleEscapeCharacters(finaltext, false) + } + } +} + +module.exports = { nodeClass: Oxylabs_DocumentLoaders } diff --git a/packages/components/nodes/documentloaders/Oxylabs/oxylabs.svg b/packages/components/nodes/documentloaders/Oxylabs/oxylabs.svg new file mode 100644 index 000000000..2ddd10b01 --- /dev/null +++ b/packages/components/nodes/documentloaders/Oxylabs/oxylabs.svg @@ -0,0 +1,4 @@ + + + + diff --git a/packages/components/nodes/documentloaders/Pdf/Pdf.ts b/packages/components/nodes/documentloaders/Pdf/Pdf.ts index 7e4c777ff..52f5197cf 100644 --- 
a/packages/components/nodes/documentloaders/Pdf/Pdf.ts +++ b/packages/components/nodes/documentloaders/Pdf/Pdf.ts @@ -122,11 +122,12 @@ class Pdf_DocumentLoaders implements INode { } else { files = [fileName] } + const orgId = options.orgId const chatflowid = options.chatflowid for (const file of files) { if (!file) continue - const fileData = await getFileFromStorage(file, chatflowid) + const fileData = await getFileFromStorage(file, orgId, chatflowid) const bf = Buffer.from(fileData) await this.extractDocs(usage, bf, legacyBuild, textSplitter, docs) } diff --git a/packages/components/nodes/documentloaders/Playwright/Playwright.ts b/packages/components/nodes/documentloaders/Playwright/Playwright.ts index f0946dbd8..f05074593 100644 --- a/packages/components/nodes/documentloaders/Playwright/Playwright.ts +++ b/packages/components/nodes/documentloaders/Playwright/Playwright.ts @@ -1,14 +1,15 @@ -import { omit } from 'lodash' -import { ICommonObject, IDocument, INode, INodeData, INodeParams } from '../../../src/Interface' -import { TextSplitter } from 'langchain/text_splitter' import { Browser, Page, PlaywrightWebBaseLoader, PlaywrightWebBaseLoaderOptions } from '@langchain/community/document_loaders/web/playwright' +import { Document } from '@langchain/core/documents' +import { TextSplitter } from 'langchain/text_splitter' import { test } from 'linkifyjs' +import { omit } from 'lodash' import { handleEscapeCharacters, INodeOutputsValue, webCrawl, xmlScrape } from '../../../src' +import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface' class Playwright_DocumentLoaders implements INode { label: string @@ -113,6 +114,14 @@ class Playwright_DocumentLoaders implements INode { additionalParams: true, description: 'CSS selectors like .div or #div' }, + { + label: 'CSS Selector (Optional)', + name: 'cssSelector', + type: 'string', + description: 'Only content inside this selector will be extracted. 
Leave empty to use the entire page body.', + optional: true, + additionalParams: true + }, { label: 'Additional Metadata', name: 'metadata', @@ -155,10 +164,17 @@ class Playwright_DocumentLoaders implements INode { const relativeLinksMethod = nodeData.inputs?.relativeLinksMethod as string const selectedLinks = nodeData.inputs?.selectedLinks as string[] let limit = parseInt(nodeData.inputs?.limit as string) - let waitUntilGoToOption = nodeData.inputs?.waitUntilGoToOption as 'load' | 'domcontentloaded' | 'networkidle' | 'commit' | undefined - let waitForSelector = nodeData.inputs?.waitForSelector as string + const waitUntilGoToOption = nodeData.inputs?.waitUntilGoToOption as + | 'load' + | 'domcontentloaded' + | 'networkidle' + | 'commit' + | undefined + const waitForSelector = nodeData.inputs?.waitForSelector as string + const cssSelector = nodeData.inputs?.cssSelector as string const _omitMetadataKeys = nodeData.inputs?.omitMetadataKeys as string const output = nodeData.outputs?.output as string + const orgId = options.orgId let omitMetadataKeys: string[] = [] if (_omitMetadataKeys) { @@ -171,13 +187,17 @@ class Playwright_DocumentLoaders implements INode { throw new Error('Invalid URL') } - async function playwrightLoader(url: string): Promise { + async function playwrightLoader(url: string): Promise { try { let docs = [] + + const executablePath = process.env.PLAYWRIGHT_EXECUTABLE_PATH + const config: PlaywrightWebBaseLoaderOptions = { launchOptions: { args: ['--no-sandbox'], - headless: true + headless: true, + executablePath: executablePath } } if (waitUntilGoToOption) { @@ -185,12 +205,22 @@ class Playwright_DocumentLoaders implements INode { waitUntil: waitUntilGoToOption } } - if (waitForSelector) { + if (cssSelector || waitForSelector) { config['evaluate'] = async (page: Page, _: Browser): Promise => { - await page.waitForSelector(waitForSelector) + if (waitForSelector) { + await page.waitForSelector(waitForSelector) + } - const result = await 
page.evaluate(() => document.body.innerHTML) - return result + if (cssSelector) { + const selectorHandle = await page.$(cssSelector) + const result = await page.evaluate( + (htmlSelection) => htmlSelection?.innerHTML ?? document.body.innerHTML, + selectorHandle + ) + return result + } else { + return await page.evaluate(() => document.body.innerHTML) + } } } const loader = new PlaywrightWebBaseLoader(url, config) @@ -202,13 +232,14 @@ class Playwright_DocumentLoaders implements INode { } return docs } catch (err) { - if (process.env.DEBUG === 'true') options.logger.error(`error in PlaywrightWebBaseLoader: ${err.message}, on page: ${url}`) + if (process.env.DEBUG === 'true') + options.logger.error(`[${orgId}]: Error in PlaywrightWebBaseLoader: ${err.message}, on page: ${url}`) } } - let docs: IDocument[] = [] + let docs: Document[] = [] if (relativeLinksMethod) { - if (process.env.DEBUG === 'true') options.logger.info(`Start ${relativeLinksMethod}`) + if (process.env.DEBUG === 'true') options.logger.info(`[${orgId}]: Start PlaywrightWebBaseLoader ${relativeLinksMethod}`) // if limit is 0 we don't want it to default to 10 so we check explicitly for null or undefined // so when limit is 0 we can fetch all the links if (limit === null || limit === undefined) limit = 10 @@ -219,20 +250,32 @@ class Playwright_DocumentLoaders implements INode { : relativeLinksMethod === 'webCrawl' ? 
await webCrawl(url, limit) : await xmlScrape(url, limit) - if (process.env.DEBUG === 'true') options.logger.info(`pages: ${JSON.stringify(pages)}, length: ${pages.length}`) + if (process.env.DEBUG === 'true') + options.logger.info(`[${orgId}]: PlaywrightWebBaseLoader pages: ${JSON.stringify(pages)}, length: ${pages.length}`) if (!pages || pages.length === 0) throw new Error('No relative links found') for (const page of pages) { - docs.push(...(await playwrightLoader(page))) + const result = await playwrightLoader(page) + if (result) { + docs.push(...result) + } } - if (process.env.DEBUG === 'true') options.logger.info(`Finish ${relativeLinksMethod}`) + if (process.env.DEBUG === 'true') options.logger.info(`[${orgId}]: Finish PlaywrightWebBaseLoader ${relativeLinksMethod}`) } else if (selectedLinks && selectedLinks.length > 0) { if (process.env.DEBUG === 'true') - options.logger.info(`pages: ${JSON.stringify(selectedLinks)}, length: ${selectedLinks.length}`) + options.logger.info( + `[${orgId}]: PlaywrightWebBaseLoader pages: ${JSON.stringify(selectedLinks)}, length: ${selectedLinks.length}` + ) for (const page of selectedLinks.slice(0, limit)) { - docs.push(...(await playwrightLoader(page))) + const result = await playwrightLoader(page) + if (result) { + docs.push(...result) + } } } else { - docs = await playwrightLoader(url) + const result = await playwrightLoader(url) + if (result) { + docs.push(...result) + } } if (metadata) { diff --git a/packages/components/nodes/documentloaders/Puppeteer/Puppeteer.ts b/packages/components/nodes/documentloaders/Puppeteer/Puppeteer.ts index fa88f9632..9b4ada916 100644 --- a/packages/components/nodes/documentloaders/Puppeteer/Puppeteer.ts +++ b/packages/components/nodes/documentloaders/Puppeteer/Puppeteer.ts @@ -1,10 +1,11 @@ -import { omit } from 'lodash' -import { ICommonObject, IDocument, INode, INodeData, INodeParams } from '../../../src/Interface' -import { TextSplitter } from 'langchain/text_splitter' import { Browser, 
Page, PuppeteerWebBaseLoader, PuppeteerWebBaseLoaderOptions } from '@langchain/community/document_loaders/web/puppeteer' +import { Document } from '@langchain/core/documents' +import { TextSplitter } from 'langchain/text_splitter' import { test } from 'linkifyjs' -import { handleEscapeCharacters, INodeOutputsValue, webCrawl, xmlScrape } from '../../../src' +import { omit } from 'lodash' import { PuppeteerLifeCycleEvent } from 'puppeteer' +import { handleEscapeCharacters, INodeOutputsValue, webCrawl, xmlScrape } from '../../../src' +import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface' class Puppeteer_DocumentLoaders implements INode { label: string @@ -109,6 +110,14 @@ class Puppeteer_DocumentLoaders implements INode { additionalParams: true, description: 'CSS selectors like .div or #div' }, + { + label: 'CSS Selector (Optional)', + name: 'cssSelector', + type: 'string', + description: 'Only content inside this selector will be extracted. Leave empty to use the entire page body.', + optional: true, + additionalParams: true + }, { label: 'Additional Metadata', name: 'metadata', @@ -151,10 +160,12 @@ class Puppeteer_DocumentLoaders implements INode { const relativeLinksMethod = nodeData.inputs?.relativeLinksMethod as string const selectedLinks = nodeData.inputs?.selectedLinks as string[] let limit = parseInt(nodeData.inputs?.limit as string) - let waitUntilGoToOption = nodeData.inputs?.waitUntilGoToOption as PuppeteerLifeCycleEvent - let waitForSelector = nodeData.inputs?.waitForSelector as string + const waitUntilGoToOption = nodeData.inputs?.waitUntilGoToOption as PuppeteerLifeCycleEvent + const waitForSelector = nodeData.inputs?.waitForSelector as string + const cssSelector = nodeData.inputs?.cssSelector as string const _omitMetadataKeys = nodeData.inputs?.omitMetadataKeys as string const output = nodeData.outputs?.output as string + const orgId = options.orgId let omitMetadataKeys: string[] = [] if (_omitMetadataKeys) { @@ -167,13 
+178,17 @@ class Puppeteer_DocumentLoaders implements INode { throw new Error('Invalid URL') } - async function puppeteerLoader(url: string): Promise { + async function puppeteerLoader(url: string): Promise { try { - let docs = [] + let docs: Document[] = [] + + const executablePath = process.env.PUPPETEER_EXECUTABLE_PATH + const config: PuppeteerWebBaseLoaderOptions = { launchOptions: { args: ['--no-sandbox'], - headless: 'new' + headless: 'new', + executablePath: executablePath } } if (waitUntilGoToOption) { @@ -181,12 +196,22 @@ class Puppeteer_DocumentLoaders implements INode { waitUntil: waitUntilGoToOption } } - if (waitForSelector) { + if (cssSelector || waitForSelector) { config['evaluate'] = async (page: Page, _: Browser): Promise => { - await page.waitForSelector(waitForSelector) + if (waitForSelector) { + await page.waitForSelector(waitForSelector) + } - const result = await page.evaluate(() => document.body.innerHTML) - return result + if (cssSelector) { + const selectorHandle = await page.$(cssSelector) + const result = await page.evaluate( + (htmlSelection) => htmlSelection?.innerHTML ?? 
document.body.innerHTML, + selectorHandle + ) + return result + } else { + return await page.evaluate(() => document.body.innerHTML) + } } } const loader = new PuppeteerWebBaseLoader(url, config) @@ -198,13 +223,14 @@ class Puppeteer_DocumentLoaders implements INode { } return docs } catch (err) { - if (process.env.DEBUG === 'true') options.logger.error(`error in PuppeteerWebBaseLoader: ${err.message}, on page: ${url}`) + if (process.env.DEBUG === 'true') + options.logger.error(`[${orgId}]: Error in PuppeteerWebBaseLoader: ${err.message}, on page: ${url}`) } } - let docs: IDocument[] = [] + let docs: Document[] = [] if (relativeLinksMethod) { - if (process.env.DEBUG === 'true') options.logger.info(`Start ${relativeLinksMethod}`) + if (process.env.DEBUG === 'true') options.logger.info(`[${orgId}]: Start PuppeteerWebBaseLoader ${relativeLinksMethod}`) // if limit is 0 we don't want it to default to 10 so we check explicitly for null or undefined // so when limit is 0 we can fetch all the links if (limit === null || limit === undefined) limit = 10 @@ -215,20 +241,32 @@ class Puppeteer_DocumentLoaders implements INode { : relativeLinksMethod === 'webCrawl' ? 
await webCrawl(url, limit) : await xmlScrape(url, limit) - if (process.env.DEBUG === 'true') options.logger.info(`pages: ${JSON.stringify(pages)}, length: ${pages.length}`) + if (process.env.DEBUG === 'true') + options.logger.info(`[${orgId}]: PuppeteerWebBaseLoader pages: ${JSON.stringify(pages)}, length: ${pages.length}`) if (!pages || pages.length === 0) throw new Error('No relative links found') for (const page of pages) { - docs.push(...(await puppeteerLoader(page))) + const result = await puppeteerLoader(page) + if (result) { + docs.push(...result) + } } - if (process.env.DEBUG === 'true') options.logger.info(`Finish ${relativeLinksMethod}`) + if (process.env.DEBUG === 'true') options.logger.info(`[${orgId}]: Finish PuppeteerWebBaseLoader ${relativeLinksMethod}`) } else if (selectedLinks && selectedLinks.length > 0) { if (process.env.DEBUG === 'true') - options.logger.info(`pages: ${JSON.stringify(selectedLinks)}, length: ${selectedLinks.length}`) + options.logger.info( + `[${orgId}]: PuppeteerWebBaseLoader pages: ${JSON.stringify(selectedLinks)}, length: ${selectedLinks.length}` + ) for (const page of selectedLinks.slice(0, limit)) { - docs.push(...(await puppeteerLoader(page))) + const result = await puppeteerLoader(page) + if (result) { + docs.push(...result) + } } } else { - docs = await puppeteerLoader(url) + const result = await puppeteerLoader(url) + if (result) { + docs.push(...result) + } } if (metadata) { diff --git a/packages/components/nodes/documentloaders/S3Directory/S3Directory.ts b/packages/components/nodes/documentloaders/S3Directory/S3Directory.ts index 072822aef..4a55869e0 100644 --- a/packages/components/nodes/documentloaders/S3Directory/S3Directory.ts +++ b/packages/components/nodes/documentloaders/S3Directory/S3Directory.ts @@ -19,9 +19,9 @@ import { PDFLoader } from '@langchain/community/document_loaders/fs/pdf' import { DocxLoader } from '@langchain/community/document_loaders/fs/docx' import { TextLoader } from 
'langchain/document_loaders/fs/text' import { TextSplitter } from 'langchain/text_splitter' - import { CSVLoader } from '../Csv/CsvLoader' - +import { LoadOfSheet } from '../MicrosoftExcel/ExcelLoader' +import { PowerpointLoader } from '../MicrosoftPowerpoint/PowerpointLoader' class S3_DocumentLoaders implements INode { label: string name: string @@ -240,7 +240,13 @@ class S3_DocumentLoaders implements INode { '.json': (path) => new JSONLoader(path), '.txt': (path) => new TextLoader(path), '.csv': (path) => new CSVLoader(path), + '.xls': (path) => new LoadOfSheet(path), + '.xlsx': (path) => new LoadOfSheet(path), + '.xlsm': (path) => new LoadOfSheet(path), + '.xlsb': (path) => new LoadOfSheet(path), '.docx': (path) => new DocxLoader(path), + '.ppt': (path) => new PowerpointLoader(path), + '.pptx': (path) => new PowerpointLoader(path), '.pdf': (path) => new PDFLoader(path, { splitPages: pdfUsage !== 'perFile', diff --git a/packages/components/nodes/documentloaders/S3File/README.md b/packages/components/nodes/documentloaders/S3File/README.md index 142ed86e1..ad069f5db 100644 --- a/packages/components/nodes/documentloaders/S3File/README.md +++ b/packages/components/nodes/documentloaders/S3File/README.md @@ -4,10 +4,10 @@ DS File Loarder integration for Flowise ## ๐ŸŒฑ Env Variables -| Variable | Description | Type | Default | -| ---------------------------- | ----------------------------------------------------------------------------------------------- | ------------------------------------------------ | ----------------------------------- | -| UNSTRUCTURED_API_URL | Default `unstructuredApiUrl` for S3 File Loader | String | http://localhost:8000/general/v0/general | +| Variable | Description | Type | Default | +| -------------------- | ----------------------------------------------- | ------ | ---------------------------------------- | +| UNSTRUCTURED_API_URL | Default `unstructuredApiUrl` for S3 File Loader | String | http://localhost:8000/general/v0/general | ## 
License -Source code in this repository is made available under the [Apache License Version 2.0](https://github.com/FlowiseAI/Flowise/blob/master/LICENSE.md). \ No newline at end of file +Source code in this repository is made available under the [Apache License Version 2.0](https://github.com/FlowiseAI/Flowise/blob/master/LICENSE.md). diff --git a/packages/components/nodes/documentloaders/S3File/S3File.ts b/packages/components/nodes/documentloaders/S3File/S3File.ts index 51c198045..d77f37e09 100644 --- a/packages/components/nodes/documentloaders/S3File/S3File.ts +++ b/packages/components/nodes/documentloaders/S3File/S3File.ts @@ -14,12 +14,21 @@ import { handleDocumentLoaderMetadata, handleDocumentLoaderOutput } from '../../../src/utils' -import { S3Client, GetObjectCommand, S3ClientConfig } from '@aws-sdk/client-s3' +import { S3Client, GetObjectCommand, HeadObjectCommand, S3ClientConfig } from '@aws-sdk/client-s3' import { getRegions, MODEL_TYPE } from '../../../src/modelLoader' import { Readable } from 'node:stream' import * as fsDefault from 'node:fs' import * as path from 'node:path' import * as os from 'node:os' +import { PDFLoader } from '@langchain/community/document_loaders/fs/pdf' +import { DocxLoader } from '@langchain/community/document_loaders/fs/docx' +import { CSVLoader } from '@langchain/community/document_loaders/fs/csv' +import { LoadOfSheet } from '../MicrosoftExcel/ExcelLoader' +import { PowerpointLoader } from '../MicrosoftPowerpoint/PowerpointLoader' +import { TextSplitter } from 'langchain/text_splitter' +import { IDocument } from '../../../src/Interface' +import { omit } from 'lodash' +import { handleEscapeCharacters } from '../../../src' class S3_DocumentLoaders implements INode { label: string @@ -37,7 +46,7 @@ class S3_DocumentLoaders implements INode { constructor() { this.label = 'S3' this.name = 'S3' - this.version = 4.0 + this.version = 5.0 this.type = 'Document' this.icon = 's3.svg' this.category = 'Document Loaders' @@ -70,6 +79,52 
@@ class S3_DocumentLoaders implements INode { loadMethod: 'listRegions', default: 'us-east-1' }, + { + label: 'File Processing Method', + name: 'fileProcessingMethod', + type: 'options', + options: [ + { + label: 'Built In Loaders', + name: 'builtIn', + description: 'Use the built in loaders to process the file.' + }, + { + label: 'Unstructured', + name: 'unstructured', + description: 'Use the Unstructured API to process the file.' + } + ], + default: 'builtIn' + }, + { + label: 'Text Splitter', + name: 'textSplitter', + type: 'TextSplitter', + optional: true, + show: { + fileProcessingMethod: 'builtIn' + } + }, + { + label: 'Additional Metadata', + name: 'metadata', + type: 'json', + description: 'Additional metadata to be added to the extracted documents', + optional: true, + additionalParams: true + }, + { + label: 'Omit Metadata Keys', + name: 'omitMetadataKeys', + type: 'string', + rows: 4, + description: + 'Each document loader comes with a default set of metadata keys that are extracted from the document. You can use this field to omit some of the default metadata keys. The value should be a list of keys, seperated by comma. Use * to omit all metadata keys execept the ones you specify in the Additional Metadata field', + placeholder: 'key1, key2, key3.nestedKey1', + optional: true, + additionalParams: true + }, { label: 'Unstructured API URL', name: 'unstructuredAPIUrl', @@ -77,13 +132,21 @@ class S3_DocumentLoaders implements INode { 'Your Unstructured.io URL. 
Read more on how to get started', type: 'string', placeholder: process.env.UNSTRUCTURED_API_URL || 'http://localhost:8000/general/v0/general', - optional: !!process.env.UNSTRUCTURED_API_URL + optional: !!process.env.UNSTRUCTURED_API_URL, + additionalParams: true, + show: { + fileProcessingMethod: 'unstructured' + } }, { label: 'Unstructured API KEY', name: 'unstructuredAPIKey', type: 'password', - optional: true + optional: true, + additionalParams: true, + show: { + fileProcessingMethod: 'unstructured' + } }, { label: 'Strategy', @@ -110,7 +173,10 @@ class S3_DocumentLoaders implements INode { ], optional: true, additionalParams: true, - default: 'auto' + default: 'auto', + show: { + fileProcessingMethod: 'unstructured' + } }, { label: 'Encoding', @@ -119,7 +185,10 @@ class S3_DocumentLoaders implements INode { type: 'string', optional: true, additionalParams: true, - default: 'utf-8' + default: 'utf-8', + show: { + fileProcessingMethod: 'unstructured' + } }, { label: 'Skip Infer Table Types', @@ -214,7 +283,10 @@ class S3_DocumentLoaders implements INode { ], optional: true, additionalParams: true, - default: '["pdf", "jpg", "png"]' + default: '["pdf", "jpg", "png"]', + show: { + fileProcessingMethod: 'unstructured' + } }, { label: 'Hi-Res Model Name', @@ -247,7 +319,10 @@ class S3_DocumentLoaders implements INode { ], optional: true, additionalParams: true, - default: 'detectron2_onnx' + default: 'detectron2_onnx', + show: { + fileProcessingMethod: 'unstructured' + } }, { label: 'Chunking Strategy', @@ -267,7 +342,10 @@ class S3_DocumentLoaders implements INode { ], optional: true, additionalParams: true, - default: 'by_title' + default: 'by_title', + show: { + fileProcessingMethod: 'unstructured' + } }, { label: 'OCR Languages', @@ -337,7 +415,10 @@ class S3_DocumentLoaders implements INode { } ], optional: true, - additionalParams: true + additionalParams: true, + show: { + fileProcessingMethod: 'unstructured' + } }, { label: 'Source ID Key', @@ -348,7 +429,10 
@@ class S3_DocumentLoaders implements INode { default: 'source', placeholder: 'source', optional: true, - additionalParams: true + additionalParams: true, + show: { + fileProcessingMethod: 'unstructured' + } }, { label: 'Coordinates', @@ -357,7 +441,10 @@ class S3_DocumentLoaders implements INode { description: 'If true, return coordinates for each element. Default: false.', optional: true, additionalParams: true, - default: false + default: false, + show: { + fileProcessingMethod: 'unstructured' + } }, { label: 'XML Keep Tags', @@ -366,7 +453,10 @@ class S3_DocumentLoaders implements INode { 'If True, will retain the XML tags in the output. Otherwise it will simply extract the text from within the tags. Only applies to partition_xml.', type: 'boolean', optional: true, - additionalParams: true + additionalParams: true, + show: { + fileProcessingMethod: 'unstructured' + } }, { label: 'Include Page Breaks', @@ -374,15 +464,10 @@ class S3_DocumentLoaders implements INode { description: 'When true, the output will include page break elements when the filetype supports it.', type: 'boolean', optional: true, - additionalParams: true - }, - { - label: 'XML Keep Tags', - name: 'xmlKeepTags', - description: 'Whether to keep XML tags in the output.', - type: 'boolean', - optional: true, - additionalParams: true + additionalParams: true, + show: { + fileProcessingMethod: 'unstructured' + } }, { label: 'Multi-Page Sections', @@ -390,7 +475,10 @@ class S3_DocumentLoaders implements INode { description: 'Whether to treat multi-page documents as separate sections.', type: 'boolean', optional: true, - additionalParams: true + additionalParams: true, + show: { + fileProcessingMethod: 'unstructured' + } }, { label: 'Combine Under N Chars', @@ -399,7 +487,10 @@ class S3_DocumentLoaders implements INode { "If chunking strategy is set, combine elements until a section reaches a length of n chars. Default: value of max_characters. 
Can't exceed value of max_characters.", type: 'number', optional: true, - additionalParams: true + additionalParams: true, + show: { + fileProcessingMethod: 'unstructured' + } }, { label: 'New After N Chars', @@ -408,7 +499,10 @@ class S3_DocumentLoaders implements INode { "If chunking strategy is set, cut off new sections after reaching a length of n chars (soft max). value of max_characters. Can't exceed value of max_characters.", type: 'number', optional: true, - additionalParams: true + additionalParams: true, + show: { + fileProcessingMethod: 'unstructured' + } }, { label: 'Max Characters', @@ -418,7 +512,10 @@ class S3_DocumentLoaders implements INode { type: 'number', optional: true, additionalParams: true, - default: '500' + default: '500', + show: { + fileProcessingMethod: 'unstructured' + } }, { label: 'Additional Metadata', @@ -426,7 +523,10 @@ class S3_DocumentLoaders implements INode { type: 'json', description: 'Additional metadata to be added to the extracted documents', optional: true, - additionalParams: true + additionalParams: true, + show: { + fileProcessingMethod: 'unstructured' + } }, { label: 'Omit Metadata Keys', @@ -437,7 +537,10 @@ class S3_DocumentLoaders implements INode { 'Each document loader comes with a default set of metadata keys that are extracted from the document. You can use this field to omit some of the default metadata keys. The value should be a list of keys, seperated by comma. 
Use * to omit all metadata keys execept the ones you specify in the Additional Metadata field', placeholder: 'key1, key2, key3.nestedKey1', optional: true, - additionalParams: true + additionalParams: true, + show: { + fileProcessingMethod: 'unstructured' + } } ] this.outputs = [ @@ -466,6 +569,171 @@ class S3_DocumentLoaders implements INode { const bucketName = nodeData.inputs?.bucketName as string const keyName = nodeData.inputs?.keyName as string const region = nodeData.inputs?.region as string + const fileProcessingMethod = nodeData.inputs?.fileProcessingMethod as string + const textSplitter = nodeData.inputs?.textSplitter as TextSplitter + const metadata = nodeData.inputs?.metadata + const _omitMetadataKeys = nodeData.inputs?.omitMetadataKeys as string + const output = nodeData.outputs?.output as string + + let omitMetadataKeys: string[] = [] + if (_omitMetadataKeys) { + omitMetadataKeys = _omitMetadataKeys.split(',').map((key) => key.trim()) + } + + let credentials: S3ClientConfig['credentials'] | undefined + + if (nodeData.credential) { + const credentialData = await getCredentialData(nodeData.credential, options) + const accessKeyId = getCredentialParam('awsKey', credentialData, nodeData) + const secretAccessKey = getCredentialParam('awsSecret', credentialData, nodeData) + + if (accessKeyId && secretAccessKey) { + credentials = { + accessKeyId, + secretAccessKey + } + } + } + + const s3Config: S3ClientConfig = { + region, + credentials + } + + if (fileProcessingMethod === 'builtIn') { + return await this.processWithBuiltInLoaders( + bucketName, + keyName, + s3Config, + textSplitter, + metadata, + omitMetadataKeys, + _omitMetadataKeys, + output + ) + } else { + return await this.processWithUnstructured(nodeData, options, bucketName, keyName, s3Config) + } + } + + private async processWithBuiltInLoaders( + bucketName: string, + keyName: string, + s3Config: S3ClientConfig, + textSplitter: TextSplitter, + metadata: any, + omitMetadataKeys: string[], + 
_omitMetadataKeys: string, + output: string + ): Promise { + let docs: IDocument[] = [] + + try { + const s3Client = new S3Client(s3Config) + + // Get file metadata to determine content type + const headCommand = new HeadObjectCommand({ + Bucket: bucketName, + Key: keyName + }) + + const headResponse = await s3Client.send(headCommand) + const contentType = headResponse.ContentType || this.getMimeTypeFromExtension(keyName) + + // Download the file + const getObjectCommand = new GetObjectCommand({ + Bucket: bucketName, + Key: keyName + }) + + const response = await s3Client.send(getObjectCommand) + + const objectData = await new Promise((resolve, reject) => { + const chunks: Buffer[] = [] + + if (response.Body instanceof Readable) { + response.Body.on('data', (chunk: Buffer) => chunks.push(chunk)) + response.Body.on('end', () => resolve(Buffer.concat(chunks))) + response.Body.on('error', reject) + } else { + reject(new Error('Response body is not a readable stream.')) + } + }) + + // Process the file based on content type + const fileInfo = { + id: keyName, + name: path.basename(keyName), + mimeType: contentType, + size: objectData.length, + webViewLink: `s3://${bucketName}/${keyName}`, + bucketName: bucketName, + key: keyName, + lastModified: headResponse.LastModified, + etag: headResponse.ETag + } + + docs = await this.processFile(fileInfo, objectData) + + // Apply text splitter if provided + if (textSplitter && docs.length > 0) { + docs = await textSplitter.splitDocuments(docs) + } + + // Apply metadata transformations + if (metadata) { + const parsedMetadata = typeof metadata === 'object' ? metadata : JSON.parse(metadata) + docs = docs.map((doc) => ({ + ...doc, + metadata: + _omitMetadataKeys === '*' + ? { + ...parsedMetadata + } + : omit( + { + ...doc.metadata, + ...parsedMetadata + }, + omitMetadataKeys + ) + })) + } else { + docs = docs.map((doc) => ({ + ...doc, + metadata: + _omitMetadataKeys === '*' + ? 
{} + : omit( + { + ...doc.metadata + }, + omitMetadataKeys + ) + })) + } + } catch (error) { + throw new Error(`Failed to load S3 document: ${error.message}`) + } + + if (output === 'document') { + return docs + } else { + let finaltext = '' + for (const doc of docs) { + finaltext += `${doc.pageContent}\n` + } + return handleEscapeCharacters(finaltext, false) + } + } + + private async processWithUnstructured( + nodeData: INodeData, + options: ICommonObject, + bucketName: string, + keyName: string, + s3Config: S3ClientConfig + ): Promise { const unstructuredAPIUrl = nodeData.inputs?.unstructuredAPIUrl as string const unstructuredAPIKey = nodeData.inputs?.unstructuredAPIKey as string const strategy = nodeData.inputs?.strategy as UnstructuredLoaderStrategy @@ -488,26 +756,6 @@ class S3_DocumentLoaders implements INode { const _omitMetadataKeys = nodeData.inputs?.omitMetadataKeys as string const output = nodeData.outputs?.output as string - let credentials: S3ClientConfig['credentials'] | undefined - - if (nodeData.credential) { - const credentialData = await getCredentialData(nodeData.credential, options) - const accessKeyId = getCredentialParam('awsKey', credentialData, nodeData) - const secretAccessKey = getCredentialParam('awsSecret', credentialData, nodeData) - - if (accessKeyId && secretAccessKey) { - credentials = { - accessKeyId, - secretAccessKey - } - } - } - - const s3Config: S3ClientConfig = { - region, - credentials - } - const loader = new S3Loader({ bucket: bucketName, key: keyName, @@ -586,5 +834,202 @@ class S3_DocumentLoaders implements INode { return loader.load() } + + private getMimeTypeFromExtension(fileName: string): string { + const extension = path.extname(fileName).toLowerCase() + const mimeTypeMap: { [key: string]: string } = { + '.pdf': 'application/pdf', + '.docx': 'application/vnd.openxmlformats-officedocument.wordprocessingml.document', + '.doc': 'application/msword', + '.xlsx': 
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet', + '.xls': 'application/vnd.ms-excel', + '.pptx': 'application/vnd.openxmlformats-officedocument.presentationml.presentation', + '.ppt': 'application/vnd.ms-powerpoint', + '.txt': 'text/plain', + '.csv': 'text/csv', + '.html': 'text/html', + '.htm': 'text/html', + '.json': 'application/json', + '.xml': 'application/xml', + '.md': 'text/markdown' + } + return mimeTypeMap[extension] || 'application/octet-stream' + } + + private async processFile(fileInfo: any, buffer: Buffer): Promise { + try { + // Handle different file types + if (this.isTextBasedFile(fileInfo.mimeType)) { + // Process text files directly from buffer + const content = buffer.toString('utf-8') + + // Create document with metadata + return [ + { + pageContent: content, + metadata: { + source: fileInfo.webViewLink, + fileId: fileInfo.key, + fileName: fileInfo.name, + mimeType: fileInfo.mimeType, + size: fileInfo.size, + lastModified: fileInfo.lastModified, + etag: fileInfo.etag, + bucketName: fileInfo.bucketName + } + } + ] + } else if (this.isSupportedBinaryFile(fileInfo.mimeType)) { + // Process binary files using loaders + return await this.processBinaryFile(fileInfo, buffer) + } else { + console.warn(`Unsupported file type ${fileInfo.mimeType} for file ${fileInfo.name}`) + return [] + } + } catch (error) { + console.warn(`Failed to process file ${fileInfo.name}: ${error.message}`) + return [] + } + } + + private isTextBasedFile(mimeType: string): boolean { + const textBasedMimeTypes = [ + 'text/plain', + 'text/html', + 'text/css', + 'text/javascript', + 'text/csv', + 'text/xml', + 'application/json', + 'application/xml', + 'text/markdown', + 'text/x-markdown' + ] + return textBasedMimeTypes.includes(mimeType) + } + + private isSupportedBinaryFile(mimeType: string): boolean { + const supportedBinaryTypes = [ + 'application/pdf', + 'application/vnd.openxmlformats-officedocument.wordprocessingml.document', + 'application/msword', + 
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet', + 'application/vnd.ms-excel', + 'application/vnd.openxmlformats-officedocument.presentationml.presentation', + 'application/vnd.ms-powerpoint' + ] + return supportedBinaryTypes.includes(mimeType) + } + + private async processBinaryFile(fileInfo: any, buffer: Buffer): Promise { + let tempFilePath: string | null = null + + try { + // Create temporary file + tempFilePath = await this.createTempFile(buffer, fileInfo.name, fileInfo.mimeType) + + let docs: IDocument[] = [] + const mimeType = fileInfo.mimeType.toLowerCase() + + switch (mimeType) { + case 'application/pdf': { + const pdfLoader = new PDFLoader(tempFilePath, { + // @ts-ignore + pdfjs: () => import('pdf-parse/lib/pdf.js/v1.10.100/build/pdf.js') + }) + docs = await pdfLoader.load() + break + } + case 'application/vnd.openxmlformats-officedocument.wordprocessingml.document': + case 'application/msword': { + const docxLoader = new DocxLoader(tempFilePath) + docs = await docxLoader.load() + break + } + case 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet': + case 'application/vnd.ms-excel': { + const excelLoader = new LoadOfSheet(tempFilePath) + docs = await excelLoader.load() + break + } + case 'application/vnd.openxmlformats-officedocument.presentationml.presentation': + case 'application/vnd.ms-powerpoint': { + const pptxLoader = new PowerpointLoader(tempFilePath) + docs = await pptxLoader.load() + break + } + case 'text/csv': { + const csvLoader = new CSVLoader(tempFilePath) + docs = await csvLoader.load() + break + } + default: + throw new Error(`Unsupported binary file type: ${mimeType}`) + } + + // Add S3 metadata to each document + if (docs.length > 0) { + const s3Metadata = { + source: fileInfo.webViewLink, + fileId: fileInfo.key, + fileName: fileInfo.name, + mimeType: fileInfo.mimeType, + size: fileInfo.size, + lastModified: fileInfo.lastModified, + etag: fileInfo.etag, + bucketName: fileInfo.bucketName, + 
totalPages: docs.length // Total number of pages/sheets in the file + } + + return docs.map((doc, index) => ({ + ...doc, + metadata: { + ...doc.metadata, // Keep original loader metadata (page numbers, etc.) + ...s3Metadata, // Add S3 metadata + pageIndex: index // Add page/sheet index + } + })) + } + + return [] + } catch (error) { + throw new Error(`Failed to process binary file: ${error.message}`) + } finally { + // Clean up temporary file + if (tempFilePath && fsDefault.existsSync(tempFilePath)) { + try { + fsDefault.unlinkSync(tempFilePath) + } catch (e) { + console.warn(`Failed to delete temporary file: ${tempFilePath}`) + } + } + } + } + + private async createTempFile(buffer: Buffer, fileName: string, mimeType: string): Promise { + // Get appropriate file extension + let extension = path.extname(fileName) + if (!extension) { + const extensionMap: { [key: string]: string } = { + 'application/pdf': '.pdf', + 'application/vnd.openxmlformats-officedocument.wordprocessingml.document': '.docx', + 'application/msword': '.doc', + 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet': '.xlsx', + 'application/vnd.ms-excel': '.xls', + 'application/vnd.openxmlformats-officedocument.presentationml.presentation': '.pptx', + 'application/vnd.ms-powerpoint': '.ppt', + 'text/csv': '.csv' + } + extension = extensionMap[mimeType] || '.tmp' + } + + // Create temporary file + const tempDir = os.tmpdir() + const tempFileName = `s3_${Date.now()}_${Math.random().toString(36).substring(7)}${extension}` + const tempFilePath = path.join(tempDir, tempFileName) + + fsDefault.writeFileSync(tempFilePath, buffer) + return tempFilePath + } } module.exports = { nodeClass: S3_DocumentLoaders } diff --git a/packages/components/nodes/documentloaders/Text/Text.ts b/packages/components/nodes/documentloaders/Text/Text.ts index 950107d6a..448a25979 100644 --- a/packages/components/nodes/documentloaders/Text/Text.ts +++ b/packages/components/nodes/documentloaders/Text/Text.ts @@ -98,11 
+98,12 @@ class Text_DocumentLoaders implements INode { } else { files = [fileName] } + const orgId = options.orgId const chatflowid = options.chatflowid for (const file of files) { if (!file) continue - const fileData = await getFileFromStorage(file, chatflowid) + const fileData = await getFileFromStorage(file, orgId, chatflowid) const blob = new Blob([fileData]) const loader = new TextLoader(blob) diff --git a/packages/components/nodes/documentloaders/Unstructured/README.md b/packages/components/nodes/documentloaders/Unstructured/README.md index 0854cc67d..5295e9f1f 100644 --- a/packages/components/nodes/documentloaders/Unstructured/README.md +++ b/packages/components/nodes/documentloaders/Unstructured/README.md @@ -4,10 +4,10 @@ Unstructured File Loader integration for Flowise ## ๐ŸŒฑ Env Variables -| Variable | Description | Type | Default | -| ---------------------------- | ----------------------------------------------------------------------------------------------- | ------------------------------------------------ | ----------------------------------- | -| UNSTRUCTURED_API_URL | Default `apiUrl` for Unstructured File/Floder Loader | String | http://localhost:8000/general/v0/general | +| Variable | Description | Type | Default | +| -------------------- | ---------------------------------------------------- | ------ | ---------------------------------------- | +| UNSTRUCTURED_API_URL | Default `apiUrl` for Unstructured File/Floder Loader | String | http://localhost:8000/general/v0/general | ## License -Source code in this repository is made available under the [Apache License Version 2.0](https://github.com/FlowiseAI/Flowise/blob/master/LICENSE.md). \ No newline at end of file +Source code in this repository is made available under the [Apache License Version 2.0](https://github.com/FlowiseAI/Flowise/blob/master/LICENSE.md). 
diff --git a/packages/components/nodes/documentloaders/Unstructured/Unstructured.ts b/packages/components/nodes/documentloaders/Unstructured/Unstructured.ts index 8f3e49f62..2d976bfb8 100644 --- a/packages/components/nodes/documentloaders/Unstructured/Unstructured.ts +++ b/packages/components/nodes/documentloaders/Unstructured/Unstructured.ts @@ -27,8 +27,6 @@ type Element = { } export class UnstructuredLoader extends BaseDocumentLoader { - public filePath: string - private apiUrl = process.env.UNSTRUCTURED_API_URL || 'https://api.unstructuredapp.io/general/v0/general' private apiKey: string | undefined = process.env.UNSTRUCTURED_API_KEY @@ -138,7 +136,7 @@ export class UnstructuredLoader extends BaseDocumentLoader { }) if (!response.ok) { - throw new Error(`Failed to partition file ${this.filePath} with error ${response.status} and message ${await response.text()}`) + throw new Error(`Failed to partition file with error ${response.status} and message ${await response.text()}`) } const elements = await response.json() diff --git a/packages/components/nodes/documentloaders/Unstructured/UnstructuredFile.ts b/packages/components/nodes/documentloaders/Unstructured/UnstructuredFile.ts index 82aa03acc..d1a372b0c 100644 --- a/packages/components/nodes/documentloaders/Unstructured/UnstructuredFile.ts +++ b/packages/components/nodes/documentloaders/Unstructured/UnstructuredFile.ts @@ -4,8 +4,7 @@ import { UnstructuredLoaderOptions, UnstructuredLoaderStrategy, SkipInferTableTypes, - HiResModelName, - UnstructuredLoader as LCUnstructuredLoader + HiResModelName } from '@langchain/community/document_loaders/fs/unstructured' import { getCredentialData, getCredentialParam, handleEscapeCharacters } from '../../../src/utils' import { getFileFromStorage, INodeOutputsValue } from '../../../src' @@ -41,17 +40,6 @@ class UnstructuredFile_DocumentLoaders implements INode { optional: true } this.inputs = [ - /** Deprecated - { - label: 'File Path', - name: 'filePath', - type: 'string', - 
placeholder: '', - optional: true, - warning: - 'Use the File Upload instead of File path. If file is uploaded, this path is ignored. Path will be deprecated in future releases.' - }, - */ { label: 'Files Upload', name: 'fileObject', @@ -452,7 +440,6 @@ class UnstructuredFile_DocumentLoaders implements INode { } async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { - const filePath = nodeData.inputs?.filePath as string const unstructuredAPIUrl = nodeData.inputs?.unstructuredAPIUrl as string const strategy = nodeData.inputs?.strategy as UnstructuredLoaderStrategy const encoding = nodeData.inputs?.encoding as string @@ -532,11 +519,12 @@ class UnstructuredFile_DocumentLoaders implements INode { } else { files = [fileName] } + const orgId = options.orgId const chatflowid = options.chatflowid for (const file of files) { if (!file) continue - const fileData = await getFileFromStorage(file, chatflowid) + const fileData = await getFileFromStorage(file, orgId, chatflowid) const loaderDocs = await loader.loadAndSplitBuffer(fileData, file) docs.push(...loaderDocs) } @@ -556,12 +544,8 @@ class UnstructuredFile_DocumentLoaders implements INode { docs.push(...loaderDocs) } } - } else if (filePath) { - const loader = new LCUnstructuredLoader(filePath, obj) - const loaderDocs = await loader.load() - docs.push(...loaderDocs) } else { - throw new Error('File path or File upload is required') + throw new Error('File upload is required') } if (metadata) { diff --git a/packages/components/nodes/documentloaders/Unstructured/UnstructuredFolder.ts b/packages/components/nodes/documentloaders/Unstructured/UnstructuredFolder.ts index efa20caf1..1e217a65e 100644 --- a/packages/components/nodes/documentloaders/Unstructured/UnstructuredFolder.ts +++ b/packages/components/nodes/documentloaders/Unstructured/UnstructuredFolder.ts @@ -1,3 +1,6 @@ +/* +* Uncomment this if you want to use the UnstructuredFolder to load a folder from the file system + import { omit } from 
'lodash' import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface' import { @@ -516,3 +519,4 @@ class UnstructuredFolder_DocumentLoaders implements INode { } module.exports = { nodeClass: UnstructuredFolder_DocumentLoaders } +*/ diff --git a/packages/components/nodes/embeddings/AWSBedrockEmbedding/AWSBedrockEmbedding.ts b/packages/components/nodes/embeddings/AWSBedrockEmbedding/AWSBedrockEmbedding.ts index 4946fa8bb..5ee12705e 100644 --- a/packages/components/nodes/embeddings/AWSBedrockEmbedding/AWSBedrockEmbedding.ts +++ b/packages/components/nodes/embeddings/AWSBedrockEmbedding/AWSBedrockEmbedding.ts @@ -96,7 +96,7 @@ class AWSBedrockEmbedding_Embeddings implements INode { { label: 'Max AWS API retries', name: 'maxRetries', - description: 'This will limit the nubmer of AWS API for Titan model embeddings call retries. Used to avoid throttling.', + description: 'This will limit the number of AWS API for Titan model embeddings call retries. Used to avoid throttling.', type: 'number', optional: true, default: 5, diff --git a/packages/components/nodes/embeddings/AzureOpenAIEmbedding/README.md b/packages/components/nodes/embeddings/AzureOpenAIEmbedding/README.md index c3bd59e54..bd8df9f53 100644 --- a/packages/components/nodes/embeddings/AzureOpenAIEmbedding/README.md +++ b/packages/components/nodes/embeddings/AzureOpenAIEmbedding/README.md @@ -4,13 +4,13 @@ Azure OpenAI Embedding Model integration for Flowise ## ๐ŸŒฑ Env Variables -| Variable | Description | Type | Default | -| ---------------------------- | ----------------------------------------------------------------------------------------------- | ------------------------------------------------ | ----------------------------------- | -| AZURE_OPENAI_API_KEY | Default `credential.azureOpenAIApiKey` for Azure OpenAI Model | String | | -| AZURE_OPENAI_API_INSTANCE_NAME | Default `credential.azureOpenAIApiInstanceName` for Azure OpenAI Model | String | | -| 
AZURE_OPENAI_API_EMBEDDINGS_DEPLOYMENT_NAME | Default `credential.azureOpenAIApiDeploymentName` for Azure OpenAI Model | String | | -| AZURE_OPENAI_API_VERSION | Default `credential.azureOpenAIApiVersion` for Azure OpenAI Model | String | | +| Variable | Description | Type | Default | +| ------------------------------------------- | ------------------------------------------------------------------------ | ------ | ------- | +| AZURE_OPENAI_API_KEY | Default `credential.azureOpenAIApiKey` for Azure OpenAI Model | String | | +| AZURE_OPENAI_API_INSTANCE_NAME | Default `credential.azureOpenAIApiInstanceName` for Azure OpenAI Model | String | | +| AZURE_OPENAI_API_EMBEDDINGS_DEPLOYMENT_NAME | Default `credential.azureOpenAIApiDeploymentName` for Azure OpenAI Model | String | | +| AZURE_OPENAI_API_VERSION | Default `credential.azureOpenAIApiVersion` for Azure OpenAI Model | String | | ## License -Source code in this repository is made available under the [Apache License Version 2.0](https://github.com/FlowiseAI/Flowise/blob/master/LICENSE.md). \ No newline at end of file +Source code in this repository is made available under the [Apache License Version 2.0](https://github.com/FlowiseAI/Flowise/blob/master/LICENSE.md). 
diff --git a/packages/components/nodes/embeddings/GoogleGenerativeAIEmbedding/GoogleGenerativeAIEmbedding.ts b/packages/components/nodes/embeddings/GoogleGenerativeAIEmbedding/GoogleGenerativeAIEmbedding.ts index 2aaf53d80..f270d7d0e 100644 --- a/packages/components/nodes/embeddings/GoogleGenerativeAIEmbedding/GoogleGenerativeAIEmbedding.ts +++ b/packages/components/nodes/embeddings/GoogleGenerativeAIEmbedding/GoogleGenerativeAIEmbedding.ts @@ -4,6 +4,25 @@ import { GoogleGenerativeAIEmbeddings, GoogleGenerativeAIEmbeddingsParams } from import { TaskType } from '@google/generative-ai' import { MODEL_TYPE, getModels } from '../../../src/modelLoader' +class GoogleGenerativeAIEmbeddingsWithStripNewLines extends GoogleGenerativeAIEmbeddings { + stripNewLines: boolean + + constructor(params: GoogleGenerativeAIEmbeddingsParams & { stripNewLines?: boolean }) { + super(params) + this.stripNewLines = params.stripNewLines ?? false + } + + async embedDocuments(texts: string[]): Promise { + const processedTexts = this.stripNewLines ? texts.map((text) => text.replace(/\n/g, ' ')) : texts + return super.embedDocuments(processedTexts) + } + + async embedQuery(text: string): Promise { + const processedText = this.stripNewLines ? 
text.replace(/\n/g, ' ') : text + return super.embedQuery(processedText) + } +} + class GoogleGenerativeAIEmbedding_Embeddings implements INode { label: string name: string @@ -24,7 +43,7 @@ class GoogleGenerativeAIEmbedding_Embeddings implements INode { this.icon = 'GoogleGemini.svg' this.category = 'Embeddings' this.description = 'Google Generative API to generate embeddings for a given text' - this.baseClasses = [this.type, ...getBaseClasses(GoogleGenerativeAIEmbeddings)] + this.baseClasses = [this.type, ...getBaseClasses(GoogleGenerativeAIEmbeddingsWithStripNewLines)] this.credential = { label: 'Connect Credential', name: 'credential', @@ -55,6 +74,14 @@ class GoogleGenerativeAIEmbedding_Embeddings implements INode { { label: 'CLUSTERING', name: 'CLUSTERING' } ], default: 'TASK_TYPE_UNSPECIFIED' + }, + { + label: 'Strip New Lines', + name: 'stripNewLines', + type: 'boolean', + optional: true, + additionalParams: true, + description: 'Remove new lines from input text before embedding to reduce token count' } ] } @@ -71,6 +98,7 @@ class GoogleGenerativeAIEmbedding_Embeddings implements INode { const modelName = nodeData.inputs?.modelName as string const credentialData = await getCredentialData(nodeData.credential ?? 
'', options) const apiKey = getCredentialParam('googleGenerativeAPIKey', credentialData, nodeData) + const stripNewLines = nodeData.inputs?.stripNewLines as boolean let taskType: TaskType switch (nodeData.inputs?.tasktype as string) { @@ -93,13 +121,14 @@ class GoogleGenerativeAIEmbedding_Embeddings implements INode { taskType = TaskType.TASK_TYPE_UNSPECIFIED break } - const obj: GoogleGenerativeAIEmbeddingsParams = { + const obj: GoogleGenerativeAIEmbeddingsParams & { stripNewLines?: boolean } = { apiKey: apiKey, modelName: modelName, - taskType: taskType + taskType: taskType, + stripNewLines } - const model = new GoogleGenerativeAIEmbeddings(obj) + const model = new GoogleGenerativeAIEmbeddingsWithStripNewLines(obj) return model } } diff --git a/packages/components/nodes/embeddings/GoogleVertexAIEmbedding/GoogleVertexAIEmbedding.ts b/packages/components/nodes/embeddings/GoogleVertexAIEmbedding/GoogleVertexAIEmbedding.ts index b34fd954c..1744ff241 100644 --- a/packages/components/nodes/embeddings/GoogleVertexAIEmbedding/GoogleVertexAIEmbedding.ts +++ b/packages/components/nodes/embeddings/GoogleVertexAIEmbedding/GoogleVertexAIEmbedding.ts @@ -1,7 +1,27 @@ -import { VertexAIEmbeddings, GoogleVertexAIEmbeddingsInput } from '@langchain/google-vertexai' +import { GoogleVertexAIEmbeddingsInput, VertexAIEmbeddings } from '@langchain/google-vertexai' +import { buildGoogleCredentials } from '../../../src/google-utils' import { ICommonObject, INode, INodeData, INodeOptionsValue, INodeParams } from '../../../src/Interface' -import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils' -import { MODEL_TYPE, getModels } from '../../../src/modelLoader' +import { MODEL_TYPE, getModels, getRegions } from '../../../src/modelLoader' +import { getBaseClasses } from '../../../src/utils' + +class VertexAIEmbeddingsWithStripNewLines extends VertexAIEmbeddings { + stripNewLines: boolean + + constructor(params: GoogleVertexAIEmbeddingsInput & { 
stripNewLines?: boolean }) { + super(params) + this.stripNewLines = params.stripNewLines ?? false + } + + async embedDocuments(texts: string[]): Promise { + const processedTexts = this.stripNewLines ? texts.map((text) => text.replace(/\n/g, ' ')) : texts + return super.embedDocuments(processedTexts) + } + + async embedQuery(text: string): Promise { + const processedText = this.stripNewLines ? text.replace(/\n/g, ' ') : text + return super.embedQuery(processedText) + } +} class GoogleVertexAIEmbedding_Embeddings implements INode { label: string @@ -18,12 +38,12 @@ class GoogleVertexAIEmbedding_Embeddings implements INode { constructor() { this.label = 'GoogleVertexAI Embeddings' this.name = 'googlevertexaiEmbeddings' - this.version = 2.0 + this.version = 2.1 this.type = 'GoogleVertexAIEmbeddings' this.icon = 'GoogleVertex.svg' this.category = 'Embeddings' this.description = 'Google vertexAI API to generate embeddings for a given text' - this.baseClasses = [this.type, ...getBaseClasses(VertexAIEmbeddings)] + this.baseClasses = [this.type, ...getBaseClasses(VertexAIEmbeddingsWithStripNewLines)] this.credential = { label: 'Connect Credential', name: 'credential', @@ -39,7 +59,23 @@ class GoogleVertexAIEmbedding_Embeddings implements INode { name: 'modelName', type: 'asyncOptions', loadMethod: 'listModels', - default: 'textembedding-gecko@001' + default: 'text-embedding-004' + }, + { + label: 'Region', + description: 'Region to use for the model.', + name: 'region', + type: 'asyncOptions', + loadMethod: 'listRegions', + optional: true + }, + { + label: 'Strip New Lines', + name: 'stripNewLines', + type: 'boolean', + optional: true, + additionalParams: true, + description: 'Remove new lines from input text before embedding to reduce token count' } ] } @@ -48,38 +84,28 @@ class GoogleVertexAIEmbedding_Embeddings implements INode { loadMethods = { async listModels(): Promise { return await getModels(MODEL_TYPE.EMBEDDING, 'googlevertexaiEmbeddings') + }, + async 
listRegions(): Promise { + return await getRegions(MODEL_TYPE.EMBEDDING, 'googlevertexaiEmbeddings') } } async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { - const credentialData = await getCredentialData(nodeData.credential ?? '', options) const modelName = nodeData.inputs?.modelName as string - const googleApplicationCredentialFilePath = getCredentialParam('googleApplicationCredentialFilePath', credentialData, nodeData) - const googleApplicationCredential = getCredentialParam('googleApplicationCredential', credentialData, nodeData) - const projectID = getCredentialParam('projectID', credentialData, nodeData) + const region = nodeData.inputs?.region as string + const stripNewLines = nodeData.inputs?.stripNewLines as boolean - const authOptions: any = {} - if (Object.keys(credentialData).length !== 0) { - if (!googleApplicationCredentialFilePath && !googleApplicationCredential) - throw new Error('Please specify your Google Application Credential') - if (!googleApplicationCredentialFilePath && !googleApplicationCredential) - throw new Error( - 'Error: More than one component has been inputted. 
Please use only one of the following: Google Application Credential File Path or Google Credential JSON Object' - ) - - if (googleApplicationCredentialFilePath && !googleApplicationCredential) - authOptions.keyFile = googleApplicationCredentialFilePath - else if (!googleApplicationCredentialFilePath && googleApplicationCredential) - authOptions.credentials = JSON.parse(googleApplicationCredential) - - if (projectID) authOptions.projectId = projectID + const obj: GoogleVertexAIEmbeddingsInput & { stripNewLines?: boolean } = { + model: modelName, + stripNewLines } - const obj: GoogleVertexAIEmbeddingsInput = { - model: modelName - } - if (Object.keys(authOptions).length !== 0) obj.authOptions = authOptions - const model = new VertexAIEmbeddings(obj) + const authOptions = await buildGoogleCredentials(nodeData, options) + if (authOptions && Object.keys(authOptions).length !== 0) obj.authOptions = authOptions + + if (region) obj.location = region + + const model = new VertexAIEmbeddingsWithStripNewLines(obj) return model } } diff --git a/packages/components/nodes/embeddings/HuggingFaceInferenceEmbedding/core.ts b/packages/components/nodes/embeddings/HuggingFaceInferenceEmbedding/core.ts index 49e6efa43..3ee4cb8e1 100644 --- a/packages/components/nodes/embeddings/HuggingFaceInferenceEmbedding/core.ts +++ b/packages/components/nodes/embeddings/HuggingFaceInferenceEmbedding/core.ts @@ -23,24 +23,22 @@ export class HuggingFaceInferenceEmbeddings extends Embeddings implements Huggin this.model = fields?.model ?? 'sentence-transformers/distilbert-base-nli-mean-tokens' this.apiKey = fields?.apiKey ?? getEnvironmentVariable('HUGGINGFACEHUB_API_KEY') this.endpoint = fields?.endpoint ?? '' - this.client = new HfInference(this.apiKey) - if (this.endpoint) this.client.endpoint(this.endpoint) + const hf = new HfInference(this.apiKey) + // v4 uses Inference Providers by default; only override if custom endpoint provided + this.client = this.endpoint ? 
hf.endpoint(this.endpoint) : hf } async _embed(texts: string[]): Promise { // replace newlines, which can negatively affect performance. const clean = texts.map((text) => text.replace(/\n/g, ' ')) - const hf = new HfInference(this.apiKey) const obj: any = { inputs: clean } - if (this.endpoint) { - hf.endpoint(this.endpoint) - } else { + if (!this.endpoint) { obj.model = this.model } - const res = await this.caller.callWithOptions({}, hf.featureExtraction.bind(hf), obj) + const res = await this.caller.callWithOptions({}, this.client.featureExtraction.bind(this.client), obj) return res as number[][] } diff --git a/packages/components/nodes/engine/ChatEngine/ContextChatEngine.ts b/packages/components/nodes/engine/ChatEngine/ContextChatEngine.ts index 35b6ae069..db85d061a 100644 --- a/packages/components/nodes/engine/ChatEngine/ContextChatEngine.ts +++ b/packages/components/nodes/engine/ChatEngine/ContextChatEngine.ts @@ -10,6 +10,7 @@ import { } from '../../../src/Interface' import { Metadata, BaseRetriever, LLM, ContextChatEngine, ChatMessage, NodeWithScore } from 'llamaindex' import { reformatSourceDocuments } from '../EngineUtils' +import { EvaluationRunTracerLlama } from '../../../evaluation/EvaluationRunTracerLlama' class ContextChatEngine_LlamaIndex implements INode { label: string @@ -93,6 +94,9 @@ class ContextChatEngine_LlamaIndex implements INode { const chatEngine = new ContextChatEngine({ chatModel: model, retriever: vectorStoreRetriever }) + // these are needed for evaluation runs + await EvaluationRunTracerLlama.injectEvaluationMetadata(nodeData, options, chatEngine) + const msgs = (await memory.getChatMessages(this.sessionId, false, prependMessages)) as IMessage[] for (const message of msgs) { if (message.type === 'apiMessage') { diff --git a/packages/components/nodes/engine/ChatEngine/SimpleChatEngine.ts b/packages/components/nodes/engine/ChatEngine/SimpleChatEngine.ts index e6045fda6..20f48edc1 100644 --- 
a/packages/components/nodes/engine/ChatEngine/SimpleChatEngine.ts +++ b/packages/components/nodes/engine/ChatEngine/SimpleChatEngine.ts @@ -9,6 +9,7 @@ import { IServerSideEventStreamer } from '../../../src/Interface' import { LLM, ChatMessage, SimpleChatEngine } from 'llamaindex' +import { EvaluationRunTracerLlama } from '../../../evaluation/EvaluationRunTracerLlama' class SimpleChatEngine_LlamaIndex implements INode { label: string @@ -78,6 +79,9 @@ class SimpleChatEngine_LlamaIndex implements INode { const chatEngine = new SimpleChatEngine({ llm: model }) + // these are needed for evaluation runs + await EvaluationRunTracerLlama.injectEvaluationMetadata(nodeData, options, chatEngine) + const msgs = (await memory.getChatMessages(this.sessionId, false, prependMessages)) as IMessage[] for (const message of msgs) { if (message.type === 'apiMessage') { diff --git a/packages/components/nodes/engine/QueryEngine/QueryEngine.ts b/packages/components/nodes/engine/QueryEngine/QueryEngine.ts index 14eb3c5de..2a7e98866 100644 --- a/packages/components/nodes/engine/QueryEngine/QueryEngine.ts +++ b/packages/components/nodes/engine/QueryEngine/QueryEngine.ts @@ -10,6 +10,7 @@ import { NodeWithScore } from 'llamaindex' import { reformatSourceDocuments } from '../EngineUtils' +import { EvaluationRunTracerLlama } from '../../../evaluation/EvaluationRunTracerLlama' class QueryEngine_LlamaIndex implements INode { label: string @@ -72,6 +73,8 @@ class QueryEngine_LlamaIndex implements INode { let sourceNodes: NodeWithScore[] = [] let isStreamingStarted = false + await EvaluationRunTracerLlama.injectEvaluationMetadata(nodeData, options, queryEngine) + const shouldStreamResponse = options.shouldStreamResponse const sseStreamer: IServerSideEventStreamer = options.sseStreamer as IServerSideEventStreamer const chatId = options.chatId diff --git a/packages/components/nodes/engine/SubQuestionQueryEngine/SubQuestionQueryEngine.ts 
b/packages/components/nodes/engine/SubQuestionQueryEngine/SubQuestionQueryEngine.ts index 6d8ceead9..02862c740 100644 --- a/packages/components/nodes/engine/SubQuestionQueryEngine/SubQuestionQueryEngine.ts +++ b/packages/components/nodes/engine/SubQuestionQueryEngine/SubQuestionQueryEngine.ts @@ -15,6 +15,7 @@ import { NodeWithScore } from 'llamaindex' import { reformatSourceDocuments } from '../EngineUtils' +import { EvaluationRunTracerLlama } from '../../../evaluation/EvaluationRunTracerLlama' class SubQuestionQueryEngine_LlamaIndex implements INode { label: string @@ -38,7 +39,7 @@ class SubQuestionQueryEngine_LlamaIndex implements INode { this.icon = 'subQueryEngine.svg' this.category = 'Engine' this.description = - 'Breaks complex query into sub questions for each relevant data source, then gather all the intermediate reponses and synthesizes a final response' + 'Breaks complex query into sub questions for each relevant data source, then gather all the intermediate responses and synthesizes a final response' this.baseClasses = [this.type, 'BaseQueryEngine'] this.tags = ['LlamaIndex'] this.inputs = [ @@ -89,6 +90,8 @@ class SubQuestionQueryEngine_LlamaIndex implements INode { let sourceNodes: NodeWithScore[] = [] let isStreamingStarted = false + await EvaluationRunTracerLlama.injectEvaluationMetadata(nodeData, options, queryEngine) + const shouldStreamResponse = options.shouldStreamResponse const sseStreamer: IServerSideEventStreamer = options.sseStreamer as IServerSideEventStreamer const chatId = options.chatId diff --git a/packages/components/nodes/llms/Azure OpenAI/README.md b/packages/components/nodes/llms/Azure OpenAI/README.md index de47c4dd8..9d7dd4a50 100644 --- a/packages/components/nodes/llms/Azure OpenAI/README.md +++ b/packages/components/nodes/llms/Azure OpenAI/README.md @@ -4,13 +4,13 @@ Azure OpenAI LLM integration for Flowise ## ๐ŸŒฑ Env Variables -| Variable | Description | Type | Default | -| ---------------------------- | 
----------------------------------------------------------------------------------------------- | ------------------------------------------------ | ----------------------------------- | -| AZURE_OPENAI_API_KEY | Default `credential.azureOpenAIApiKey` for Azure OpenAI LLM | String | | -| AZURE_OPENAI_API_INSTANCE_NAME | Default `credential.azureOpenAIApiInstanceName` for Azure OpenAI LLM | String | | -| AZURE_OPENAI_API_DEPLOYMENT_NAME | Default `credential.azureOpenAIApiDeploymentName` for Azure OpenAI LLM | String | | -| AZURE_OPENAI_API_VERSION | Default `credential.azureOpenAIApiVersion` for Azure OpenAI LLM | String | | +| Variable | Description | Type | Default | +| -------------------------------- | ---------------------------------------------------------------------- | ------ | ------- | +| AZURE_OPENAI_API_KEY | Default `credential.azureOpenAIApiKey` for Azure OpenAI LLM | String | | +| AZURE_OPENAI_API_INSTANCE_NAME | Default `credential.azureOpenAIApiInstanceName` for Azure OpenAI LLM | String | | +| AZURE_OPENAI_API_DEPLOYMENT_NAME | Default `credential.azureOpenAIApiDeploymentName` for Azure OpenAI LLM | String | | +| AZURE_OPENAI_API_VERSION | Default `credential.azureOpenAIApiVersion` for Azure OpenAI LLM | String | | ## License -Source code in this repository is made available under the [Apache License Version 2.0](https://github.com/FlowiseAI/Flowise/blob/master/LICENSE.md). \ No newline at end of file +Source code in this repository is made available under the [Apache License Version 2.0](https://github.com/FlowiseAI/Flowise/blob/master/LICENSE.md). 
diff --git a/packages/components/nodes/llms/GoogleVertexAI/GoogleVertexAI.ts b/packages/components/nodes/llms/GoogleVertexAI/GoogleVertexAI.ts index fa6ec0000..273ce81b9 100644 --- a/packages/components/nodes/llms/GoogleVertexAI/GoogleVertexAI.ts +++ b/packages/components/nodes/llms/GoogleVertexAI/GoogleVertexAI.ts @@ -1,8 +1,9 @@ import { BaseCache } from '@langchain/core/caches' import { VertexAI, VertexAIInput } from '@langchain/google-vertexai' import { ICommonObject, INode, INodeData, INodeOptionsValue, INodeParams } from '../../../src/Interface' -import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils' +import { getBaseClasses } from '../../../src/utils' import { getModels, MODEL_TYPE } from '../../../src/modelLoader' +import { buildGoogleCredentials } from '../../../src/google-utils' class GoogleVertexAI_LLMs implements INode { label: string @@ -83,28 +84,6 @@ class GoogleVertexAI_LLMs implements INode { } async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { - const credentialData = await getCredentialData(nodeData.credential ?? '', options) - const googleApplicationCredentialFilePath = getCredentialParam('googleApplicationCredentialFilePath', credentialData, nodeData) - const googleApplicationCredential = getCredentialParam('googleApplicationCredential', credentialData, nodeData) - const projectID = getCredentialParam('projectID', credentialData, nodeData) - - const authOptions: any = {} - if (Object.keys(credentialData).length !== 0) { - if (!googleApplicationCredentialFilePath && !googleApplicationCredential) - throw new Error('Please specify your Google Application Credential') - if (!googleApplicationCredentialFilePath && !googleApplicationCredential) - throw new Error( - 'Error: More than one component has been inputted. 
Please use only one of the following: Google Application Credential File Path or Google Credential JSON Object' - ) - - if (googleApplicationCredentialFilePath && !googleApplicationCredential) - authOptions.keyFile = googleApplicationCredentialFilePath - else if (!googleApplicationCredentialFilePath && googleApplicationCredential) - authOptions.credentials = JSON.parse(googleApplicationCredential) - - if (projectID) authOptions.projectId = projectID - } - const temperature = nodeData.inputs?.temperature as string const modelName = nodeData.inputs?.modelName as string const maxOutputTokens = nodeData.inputs?.maxOutputTokens as string @@ -115,7 +94,9 @@ class GoogleVertexAI_LLMs implements INode { temperature: parseFloat(temperature), model: modelName } - if (Object.keys(authOptions).length !== 0) obj.authOptions = authOptions + + const authOptions = await buildGoogleCredentials(nodeData, options) + if (authOptions && Object.keys(authOptions).length !== 0) obj.authOptions = authOptions if (maxOutputTokens) obj.maxOutputTokens = parseInt(maxOutputTokens, 10) if (topP) obj.topP = parseFloat(topP) diff --git a/packages/components/nodes/llms/HuggingFaceInference/core.ts b/packages/components/nodes/llms/HuggingFaceInference/core.ts index eb99d4a30..d7aa1ebec 100644 --- a/packages/components/nodes/llms/HuggingFaceInference/core.ts +++ b/packages/components/nodes/llms/HuggingFaceInference/core.ts @@ -78,6 +78,8 @@ export class HuggingFaceInference extends LLM implements HFInput { async _call(prompt: string, options: this['ParsedCallOptions']): Promise { const { HfInference } = await HuggingFaceInference.imports() const hf = new HfInference(this.apiKey) + // v4 uses Inference Providers by default; only override if custom endpoint provided + const hfClient = this.endpoint ? 
hf.endpoint(this.endpoint) : hf const obj: any = { parameters: { // make it behave similar to openai, returning only the generated text @@ -90,12 +92,10 @@ export class HuggingFaceInference extends LLM implements HFInput { }, inputs: prompt } - if (this.endpoint) { - hf.endpoint(this.endpoint) - } else { + if (!this.endpoint) { obj.model = this.model } - const res = await this.caller.callWithOptions({ signal: options.signal }, hf.textGeneration.bind(hf), obj) + const res = await this.caller.callWithOptions({ signal: options.signal }, hfClient.textGeneration.bind(hfClient), obj) return res.generated_text } diff --git a/packages/components/nodes/llms/SambaNova/Sambanova.ts b/packages/components/nodes/llms/SambaNova/Sambanova.ts new file mode 100644 index 000000000..4cb76aefa --- /dev/null +++ b/packages/components/nodes/llms/SambaNova/Sambanova.ts @@ -0,0 +1,71 @@ +import { getBaseClasses, getCredentialData, getCredentialParam, ICommonObject, INode, INodeData, INodeParams } from '../../../src' +import { OpenAI } from '@langchain/openai' +import { BaseCache } from '@langchain/core/caches' + +class Sambanova_LLMs implements INode { + label: string + name: string + version: number + type: string + icon: string + category: string + description: string + baseClasses: string[] + credential: INodeParams + inputs: INodeParams[] + + constructor() { + this.label = 'Sambanova' + this.name = 'sambanova' + this.version = 1.0 + this.type = 'Sambanova' + this.icon = 'sambanova.png' + this.category = 'LLMs' + this.description = 'Wrapper around Sambanova API for large language models' + this.baseClasses = [this.type, ...getBaseClasses(OpenAI)] + this.credential = { + label: 'Connect Credential', + name: 'credential', + type: 'credential', + credentialNames: ['sambanovaApi'] + } + this.inputs = [ + { + label: 'Cache', + name: 'cache', + type: 'BaseCache', + optional: true + }, + { + label: 'Model Name', + name: 'modelName', + type: 'string', + default: 'Meta-Llama-3.3-70B-Instruct', + 
description: 'For more details see https://docs.sambanova.ai/cloud/docs/get-started/supported-models', + optional: true + } + ] + } + + async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { + const cache = nodeData.inputs?.cache as BaseCache + const modelName = nodeData.inputs?.modelName as string + + const credentialData = await getCredentialData(nodeData.credential ?? '', options) + const sambanovaKey = getCredentialParam('sambanovaApiKey', credentialData, nodeData) + + const obj: any = { + model: modelName, + configuration: { + baseURL: 'https://api.sambanova.ai/v1', + apiKey: sambanovaKey + } + } + if (cache) obj.cache = cache + + const sambanova = new OpenAI(obj) + return sambanova + } +} + +module.exports = { nodeClass: Sambanova_LLMs } diff --git a/packages/components/nodes/llms/SambaNova/sambanova.png b/packages/components/nodes/llms/SambaNova/sambanova.png new file mode 100644 index 000000000..8bc16c5d5 Binary files /dev/null and b/packages/components/nodes/llms/SambaNova/sambanova.png differ diff --git a/packages/components/nodes/memory/AgentMemory/AgentMemory.ts b/packages/components/nodes/memory/AgentMemory/AgentMemory.ts index d317e138f..ca6c0ebed 100644 --- a/packages/components/nodes/memory/AgentMemory/AgentMemory.ts +++ b/packages/components/nodes/memory/AgentMemory/AgentMemory.ts @@ -108,6 +108,7 @@ class AgentMemory_Memory implements INode { const databaseType = nodeData.inputs?.databaseType as string const databaseEntities = options.databaseEntities as IDatabaseEntity const chatflowid = options.chatflowid as string + const orgId = options.orgId as string const appDataSource = options.appDataSource as DataSource let additionalConfiguration = {} @@ -135,7 +136,8 @@ class AgentMemory_Memory implements INode { threadId, appDataSource, databaseEntities, - chatflowid + chatflowid, + orgId } const recordManager = new SqliteSaver(args) return recordManager @@ -159,7 +161,8 @@ class AgentMemory_Memory implements INode { threadId, 
appDataSource, databaseEntities, - chatflowid + chatflowid, + orgId } const recordManager = new PostgresSaver(args) return recordManager @@ -184,7 +187,8 @@ class AgentMemory_Memory implements INode { threadId, appDataSource, databaseEntities, - chatflowid + chatflowid, + orgId } const recordManager = new MySQLSaver(args) return recordManager diff --git a/packages/components/nodes/memory/AgentMemory/MySQLAgentMemory/MySQLAgentMemory.ts b/packages/components/nodes/memory/AgentMemory/MySQLAgentMemory/MySQLAgentMemory.ts index af3ebe619..3eb5c9503 100644 --- a/packages/components/nodes/memory/AgentMemory/MySQLAgentMemory/MySQLAgentMemory.ts +++ b/packages/components/nodes/memory/AgentMemory/MySQLAgentMemory/MySQLAgentMemory.ts @@ -65,6 +65,7 @@ class MySQLAgentMemory_Memory implements INode { const databaseEntities = options.databaseEntities as IDatabaseEntity const chatflowid = options.chatflowid as string const appDataSource = options.appDataSource as DataSource + const orgId = options.orgId as string let additionalConfiguration = {} if (additionalConfig) { @@ -102,7 +103,8 @@ class MySQLAgentMemory_Memory implements INode { threadId, appDataSource, databaseEntities, - chatflowid + chatflowid, + orgId } const recordManager = new MySQLSaver(args) return recordManager diff --git a/packages/components/nodes/memory/AgentMemory/MySQLAgentMemory/mysqlSaver.ts b/packages/components/nodes/memory/AgentMemory/MySQLAgentMemory/mysqlSaver.ts index 5b41b0887..f55098ed7 100644 --- a/packages/components/nodes/memory/AgentMemory/MySQLAgentMemory/mysqlSaver.ts +++ b/packages/components/nodes/memory/AgentMemory/MySQLAgentMemory/mysqlSaver.ts @@ -242,7 +242,7 @@ export class MySQLSaver extends BaseCheckpointSaver implements MemoryMethods { } if (returnBaseMessages) { - return await mapChatMessageToBaseMessage(chatMessage) + return await mapChatMessageToBaseMessage(chatMessage, this.config.orgId) } let returnIMessages: IMessage[] = [] diff --git 
a/packages/components/nodes/memory/AgentMemory/PostgresAgentMemory/PostgresAgentMemory.ts b/packages/components/nodes/memory/AgentMemory/PostgresAgentMemory/PostgresAgentMemory.ts index cb6d041a6..2ab86f66e 100644 --- a/packages/components/nodes/memory/AgentMemory/PostgresAgentMemory/PostgresAgentMemory.ts +++ b/packages/components/nodes/memory/AgentMemory/PostgresAgentMemory/PostgresAgentMemory.ts @@ -65,6 +65,7 @@ class PostgresAgentMemory_Memory implements INode { const databaseEntities = options.databaseEntities as IDatabaseEntity const chatflowid = options.chatflowid as string const appDataSource = options.appDataSource as DataSource + const orgId = options.orgId as string let additionalConfiguration = {} if (additionalConfig) { @@ -101,7 +102,8 @@ class PostgresAgentMemory_Memory implements INode { threadId, appDataSource, databaseEntities, - chatflowid + chatflowid, + orgId } const recordManager = new PostgresSaver(args) return recordManager diff --git a/packages/components/nodes/memory/AgentMemory/PostgresAgentMemory/pgSaver.ts b/packages/components/nodes/memory/AgentMemory/PostgresAgentMemory/pgSaver.ts index 5bc69f663..59d6f7201 100644 --- a/packages/components/nodes/memory/AgentMemory/PostgresAgentMemory/pgSaver.ts +++ b/packages/components/nodes/memory/AgentMemory/PostgresAgentMemory/pgSaver.ts @@ -283,7 +283,7 @@ CREATE TABLE IF NOT EXISTS ${tableName} ( } if (returnBaseMessages) { - return await mapChatMessageToBaseMessage(chatMessage) + return await mapChatMessageToBaseMessage(chatMessage, this.config.orgId) } let returnIMessages: IMessage[] = [] diff --git a/packages/components/nodes/memory/AgentMemory/SQLiteAgentMemory/SQLiteAgentMemory.ts b/packages/components/nodes/memory/AgentMemory/SQLiteAgentMemory/SQLiteAgentMemory.ts index b301e3a88..29c9b0a37 100644 --- a/packages/components/nodes/memory/AgentMemory/SQLiteAgentMemory/SQLiteAgentMemory.ts +++ b/packages/components/nodes/memory/AgentMemory/SQLiteAgentMemory/SQLiteAgentMemory.ts @@ -51,6 +51,7 
@@ class SQLiteAgentMemory_Memory implements INode { const databaseEntities = options.databaseEntities as IDatabaseEntity const chatflowid = options.chatflowid as string const appDataSource = options.appDataSource as DataSource + const orgId = options.orgId as string let additionalConfiguration = {} if (additionalConfig) { @@ -76,7 +77,8 @@ class SQLiteAgentMemory_Memory implements INode { threadId, appDataSource, databaseEntities, - chatflowid + chatflowid, + orgId } const recordManager = new SqliteSaver(args) diff --git a/packages/components/nodes/memory/AgentMemory/SQLiteAgentMemory/sqliteSaver.ts b/packages/components/nodes/memory/AgentMemory/SQLiteAgentMemory/sqliteSaver.ts index 351c78961..4ca0fa137 100644 --- a/packages/components/nodes/memory/AgentMemory/SQLiteAgentMemory/sqliteSaver.ts +++ b/packages/components/nodes/memory/AgentMemory/SQLiteAgentMemory/sqliteSaver.ts @@ -266,7 +266,7 @@ CREATE TABLE IF NOT EXISTS ${tableName} ( } if (returnBaseMessages) { - return await mapChatMessageToBaseMessage(chatMessage) + return await mapChatMessageToBaseMessage(chatMessage, this.config.orgId) } let returnIMessages: IMessage[] = [] diff --git a/packages/components/nodes/memory/AgentMemory/interface.ts b/packages/components/nodes/memory/AgentMemory/interface.ts index e2be67320..2e036cde6 100644 --- a/packages/components/nodes/memory/AgentMemory/interface.ts +++ b/packages/components/nodes/memory/AgentMemory/interface.ts @@ -9,6 +9,7 @@ export type SaverOptions = { appDataSource: DataSource databaseEntities: IDatabaseEntity chatflowid: string + orgId: string } export interface CheckpointTuple { diff --git a/packages/components/nodes/memory/BufferMemory/BufferMemory.ts b/packages/components/nodes/memory/BufferMemory/BufferMemory.ts index 972301daf..1a46e0d3c 100644 --- a/packages/components/nodes/memory/BufferMemory/BufferMemory.ts +++ b/packages/components/nodes/memory/BufferMemory/BufferMemory.ts @@ -61,6 +61,7 @@ class BufferMemory_Memory implements INode { const 
appDataSource = options.appDataSource as DataSource const databaseEntities = options.databaseEntities as IDatabaseEntity const chatflowid = options.chatflowid as string + const orgId = options.orgId as string return new BufferMemoryExtended({ returnMessages: true, @@ -68,7 +69,8 @@ class BufferMemory_Memory implements INode { sessionId, appDataSource, databaseEntities, - chatflowid + chatflowid, + orgId }) } } @@ -78,12 +80,14 @@ interface BufferMemoryExtendedInput { appDataSource: DataSource databaseEntities: IDatabaseEntity chatflowid: string + orgId: string } class BufferMemoryExtended extends FlowiseMemory implements MemoryMethods { appDataSource: DataSource databaseEntities: IDatabaseEntity chatflowid: string + orgId: string sessionId = '' constructor(fields: BufferMemoryInput & BufferMemoryExtendedInput) { @@ -92,6 +96,7 @@ class BufferMemoryExtended extends FlowiseMemory implements MemoryMethods { this.appDataSource = fields.appDataSource this.databaseEntities = fields.databaseEntities this.chatflowid = fields.chatflowid + this.orgId = fields.orgId } async getChatMessages( @@ -117,7 +122,7 @@ class BufferMemoryExtended extends FlowiseMemory implements MemoryMethods { } if (returnBaseMessages) { - return await mapChatMessageToBaseMessage(chatMessage) + return await mapChatMessageToBaseMessage(chatMessage, this.orgId) } let returnIMessages: IMessage[] = [] diff --git a/packages/components/nodes/memory/BufferWindowMemory/BufferWindowMemory.ts b/packages/components/nodes/memory/BufferWindowMemory/BufferWindowMemory.ts index 82cfd9291..e63e10626 100644 --- a/packages/components/nodes/memory/BufferWindowMemory/BufferWindowMemory.ts +++ b/packages/components/nodes/memory/BufferWindowMemory/BufferWindowMemory.ts @@ -69,6 +69,7 @@ class BufferWindowMemory_Memory implements INode { const appDataSource = options.appDataSource as DataSource const databaseEntities = options.databaseEntities as IDatabaseEntity const chatflowid = options.chatflowid as string + const orgId 
= options.orgId as string const obj: Partial & BufferMemoryExtendedInput = { returnMessages: true, @@ -77,7 +78,8 @@ class BufferWindowMemory_Memory implements INode { k: parseInt(k, 10), appDataSource, databaseEntities, - chatflowid + chatflowid, + orgId } return new BufferWindowMemoryExtended(obj) @@ -89,12 +91,14 @@ interface BufferMemoryExtendedInput { appDataSource: DataSource databaseEntities: IDatabaseEntity chatflowid: string + orgId: string } class BufferWindowMemoryExtended extends FlowiseWindowMemory implements MemoryMethods { appDataSource: DataSource databaseEntities: IDatabaseEntity chatflowid: string + orgId: string sessionId = '' constructor(fields: BufferWindowMemoryInput & BufferMemoryExtendedInput) { @@ -103,6 +107,7 @@ class BufferWindowMemoryExtended extends FlowiseWindowMemory implements MemoryMe this.appDataSource = fields.appDataSource this.databaseEntities = fields.databaseEntities this.chatflowid = fields.chatflowid + this.orgId = fields.orgId } async getChatMessages( @@ -134,7 +139,7 @@ class BufferWindowMemoryExtended extends FlowiseWindowMemory implements MemoryMe } if (returnBaseMessages) { - return await mapChatMessageToBaseMessage(chatMessage) + return await mapChatMessageToBaseMessage(chatMessage, this.orgId) } let returnIMessages: IMessage[] = [] diff --git a/packages/components/nodes/memory/ConversationSummaryBufferMemory/ConversationSummaryBufferMemory.ts b/packages/components/nodes/memory/ConversationSummaryBufferMemory/ConversationSummaryBufferMemory.ts index c45f93484..f8d3c00f0 100644 --- a/packages/components/nodes/memory/ConversationSummaryBufferMemory/ConversationSummaryBufferMemory.ts +++ b/packages/components/nodes/memory/ConversationSummaryBufferMemory/ConversationSummaryBufferMemory.ts @@ -78,6 +78,7 @@ class ConversationSummaryBufferMemory_Memory implements INode { const appDataSource = options.appDataSource as DataSource const databaseEntities = options.databaseEntities as IDatabaseEntity const chatflowid = 
options.chatflowid as string + const orgId = options.orgId as string const obj: ConversationSummaryBufferMemoryInput & BufferMemoryExtendedInput = { llm: model, @@ -87,7 +88,8 @@ class ConversationSummaryBufferMemory_Memory implements INode { returnMessages: true, appDataSource, databaseEntities, - chatflowid + chatflowid, + orgId } return new ConversationSummaryBufferMemoryExtended(obj) @@ -99,12 +101,14 @@ interface BufferMemoryExtendedInput { appDataSource: DataSource databaseEntities: IDatabaseEntity chatflowid: string + orgId: string } class ConversationSummaryBufferMemoryExtended extends FlowiseSummaryBufferMemory implements MemoryMethods { appDataSource: DataSource databaseEntities: IDatabaseEntity chatflowid: string + orgId: string sessionId = '' constructor(fields: ConversationSummaryBufferMemoryInput & BufferMemoryExtendedInput) { @@ -113,6 +117,7 @@ class ConversationSummaryBufferMemoryExtended extends FlowiseSummaryBufferMemory this.appDataSource = fields.appDataSource this.databaseEntities = fields.databaseEntities this.chatflowid = fields.chatflowid + this.orgId = fields.orgId } async getChatMessages( @@ -137,7 +142,7 @@ class ConversationSummaryBufferMemoryExtended extends FlowiseSummaryBufferMemory chatMessage.unshift(...prependMessages) } - let baseMessages = await mapChatMessageToBaseMessage(chatMessage) + let baseMessages = await mapChatMessageToBaseMessage(chatMessage, this.orgId) // Prune baseMessages if it exceeds max token limit if (this.movingSummaryBuffer) { diff --git a/packages/components/nodes/memory/ConversationSummaryMemory/ConversationSummaryMemory.ts b/packages/components/nodes/memory/ConversationSummaryMemory/ConversationSummaryMemory.ts index 6c4d078ba..44cb09029 100644 --- a/packages/components/nodes/memory/ConversationSummaryMemory/ConversationSummaryMemory.ts +++ b/packages/components/nodes/memory/ConversationSummaryMemory/ConversationSummaryMemory.ts @@ -69,6 +69,7 @@ class ConversationSummaryMemory_Memory implements INode { 
const appDataSource = options.appDataSource as DataSource const databaseEntities = options.databaseEntities as IDatabaseEntity const chatflowid = options.chatflowid as string + const orgId = options.orgId as string const obj: ConversationSummaryMemoryInput & BufferMemoryExtendedInput = { llm: model, @@ -77,7 +78,8 @@ class ConversationSummaryMemory_Memory implements INode { sessionId, appDataSource, databaseEntities, - chatflowid + chatflowid, + orgId } return new ConversationSummaryMemoryExtended(obj) @@ -89,12 +91,14 @@ interface BufferMemoryExtendedInput { appDataSource: DataSource databaseEntities: IDatabaseEntity chatflowid: string + orgId: string } class ConversationSummaryMemoryExtended extends FlowiseSummaryMemory implements MemoryMethods { appDataSource: DataSource databaseEntities: IDatabaseEntity chatflowid: string + orgId: string sessionId = '' constructor(fields: ConversationSummaryMemoryInput & BufferMemoryExtendedInput) { @@ -103,6 +107,7 @@ class ConversationSummaryMemoryExtended extends FlowiseSummaryMemory implements this.appDataSource = fields.appDataSource this.databaseEntities = fields.databaseEntities this.chatflowid = fields.chatflowid + this.orgId = fields.orgId } async getChatMessages( @@ -128,7 +133,7 @@ class ConversationSummaryMemoryExtended extends FlowiseSummaryMemory implements chatMessage.unshift(...prependMessages) } - const baseMessages = await mapChatMessageToBaseMessage(chatMessage) + const baseMessages = await mapChatMessageToBaseMessage(chatMessage, this.orgId) // Get summary if (this.llm && typeof this.llm !== 'string') { diff --git a/packages/components/nodes/memory/DynamoDb/DynamoDb.ts b/packages/components/nodes/memory/DynamoDb/DynamoDb.ts index 0ad0354f4..91e4e7298 100644 --- a/packages/components/nodes/memory/DynamoDb/DynamoDb.ts +++ b/packages/components/nodes/memory/DynamoDb/DynamoDb.ts @@ -125,6 +125,8 @@ const initializeDynamoDB = async (nodeData: INodeData, options: ICommonObject): config }) + const orgId = 
options.orgId as string + const memory = new BufferMemoryExtended({ memoryKey: memoryKey ?? 'chat_history', chatHistory: dynamoDb, @@ -132,7 +134,8 @@ const initializeDynamoDB = async (nodeData: INodeData, options: ICommonObject): dynamodbClient: client, tableName, partitionKey, - dynamoKey: { [partitionKey]: { S: sessionId } } + dynamoKey: { [partitionKey]: { S: sessionId } }, + orgId }) return memory } @@ -143,6 +146,7 @@ interface BufferMemoryExtendedInput { tableName: string partitionKey: string dynamoKey: Record + orgId: string } interface DynamoDBSerializedChatMessage { @@ -165,6 +169,7 @@ class BufferMemoryExtended extends FlowiseMemory implements MemoryMethods { private dynamoKey: Record private messageAttributeName: string sessionId = '' + orgId = '' dynamodbClient: DynamoDBClient constructor(fields: BufferMemoryInput & BufferMemoryExtendedInput) { @@ -174,6 +179,7 @@ class BufferMemoryExtended extends FlowiseMemory implements MemoryMethods { this.tableName = fields.tableName this.partitionKey = fields.partitionKey this.dynamoKey = fields.dynamoKey + this.orgId = fields.orgId } overrideDynamoKey(overrideSessionId = '') { @@ -260,7 +266,7 @@ class BufferMemoryExtended extends FlowiseMemory implements MemoryMethods { .filter((x): x is StoredMessage => x.type !== undefined && x.data.content !== undefined) const baseMessages = messages.map(mapStoredMessageToChatMessage) if (prependMessages?.length) { - baseMessages.unshift(...(await mapChatMessageToBaseMessage(prependMessages))) + baseMessages.unshift(...(await mapChatMessageToBaseMessage(prependMessages, this.orgId))) } return returnBaseMessages ? 
baseMessages : convertBaseMessagetoIMessage(baseMessages) } diff --git a/packages/components/nodes/memory/Mem0/Mem0.ts b/packages/components/nodes/memory/Mem0/Mem0.ts index ba7960163..9be79aaba 100644 --- a/packages/components/nodes/memory/Mem0/Mem0.ts +++ b/packages/components/nodes/memory/Mem0/Mem0.ts @@ -15,6 +15,12 @@ interface BufferMemoryExtendedInput { chatflowid: string } +interface NodeFields extends Mem0MemoryInput, Mem0MemoryExtendedInput, BufferMemoryExtendedInput { + searchOnly: boolean + useFlowiseChatId: boolean + input: string +} + class Mem0_Memory implements INode { label: string name: string @@ -143,14 +149,15 @@ class Mem0_Memory implements INode { ] } - async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { - return await initializeMem0(nodeData, options) + async init(nodeData: INodeData, input: string, options: ICommonObject): Promise { + return await initializeMem0(nodeData, input, options) } } -const initializeMem0 = async (nodeData: INodeData, options: ICommonObject): Promise => { +const initializeMem0 = async (nodeData: INodeData, input: string, options: ICommonObject): Promise => { const initialUserId = nodeData.inputs?.user_id as string const useFlowiseChatId = nodeData.inputs?.useFlowiseChatId as boolean + const orgId = options.orgId as string if (!useFlowiseChatId && !initialUserId) { throw new Error('User ID field cannot be empty when "Use Flowise Chat ID" is OFF.') @@ -183,23 +190,24 @@ const initializeMem0 = async (nodeData: INodeData, options: ICommonObject): Prom filters: (nodeData.inputs?.filters as Record) || {} } - const obj: Mem0MemoryInput & Mem0MemoryExtendedInput & BufferMemoryExtendedInput & { searchOnly: boolean; useFlowiseChatId: boolean } = - { - apiKey: apiKey, - humanPrefix: nodeData.inputs?.humanPrefix as string, - aiPrefix: nodeData.inputs?.aiPrefix as string, - inputKey: nodeData.inputs?.inputKey as string, - sessionId: constructorSessionId, - mem0Options: mem0Options, - memoryOptions: 
memoryOptions, - separateMessages: false, - returnMessages: false, - appDataSource: options.appDataSource as DataSource, - databaseEntities: options.databaseEntities as IDatabaseEntity, - chatflowid: options.chatflowid as string, - searchOnly: (nodeData.inputs?.searchOnly as boolean) || false, - useFlowiseChatId: useFlowiseChatId - } + const obj: NodeFields = { + apiKey: apiKey, + humanPrefix: nodeData.inputs?.humanPrefix as string, + aiPrefix: nodeData.inputs?.aiPrefix as string, + inputKey: nodeData.inputs?.inputKey as string, + sessionId: constructorSessionId, + mem0Options: mem0Options, + memoryOptions: memoryOptions, + separateMessages: false, + returnMessages: false, + appDataSource: options.appDataSource as DataSource, + databaseEntities: options.databaseEntities as IDatabaseEntity, + chatflowid: options.chatflowid as string, + searchOnly: (nodeData.inputs?.searchOnly as boolean) || false, + useFlowiseChatId: useFlowiseChatId, + input: input, + orgId: orgId + } return new Mem0MemoryExtended(obj) } @@ -207,11 +215,13 @@ const initializeMem0 = async (nodeData: INodeData, options: ICommonObject): Prom interface Mem0MemoryExtendedInput extends Mem0MemoryInput { memoryOptions?: MemoryOptions | SearchOptions useFlowiseChatId: boolean + orgId: string } class Mem0MemoryExtended extends BaseMem0Memory implements MemoryMethods { initialUserId: string userId: string + orgId: string memoryKey: string inputKey: string appDataSource: DataSource @@ -219,10 +229,9 @@ class Mem0MemoryExtended extends BaseMem0Memory implements MemoryMethods { chatflowid: string searchOnly: boolean useFlowiseChatId: boolean + input: string - constructor( - fields: Mem0MemoryInput & Mem0MemoryExtendedInput & BufferMemoryExtendedInput & { searchOnly: boolean; useFlowiseChatId: boolean } - ) { + constructor(fields: NodeFields) { super(fields) this.initialUserId = fields.memoryOptions?.user_id ?? 
'' this.userId = this.initialUserId @@ -233,6 +242,8 @@ class Mem0MemoryExtended extends BaseMem0Memory implements MemoryMethods { this.chatflowid = fields.chatflowid this.searchOnly = fields.searchOnly this.useFlowiseChatId = fields.useFlowiseChatId + this.input = fields.input + this.orgId = fields.orgId } // Selects Mem0 user_id based on toggle state (Flowise chat ID or input field) @@ -318,11 +329,16 @@ class Mem0MemoryExtended extends BaseMem0Memory implements MemoryMethods { if (prependMessages?.length) { returnIMessages.unshift(...prependMessages) // Reverted to original simpler unshift - chatMessage.unshift(...(prependMessages as any)) // Cast as any + chatMessage.unshift(...(prependMessages as any)) } if (returnBaseMessages) { - const memoryVariables = await this.loadMemoryVariables({}, overrideUserId) + const memoryVariables = await this.loadMemoryVariables( + { + [this.inputKey]: this.input ?? '' + }, + overrideUserId + ) const mem0History = memoryVariables[this.memoryKey] if (mem0History && typeof mem0History === 'string') { @@ -337,7 +353,7 @@ class Mem0MemoryExtended extends BaseMem0Memory implements MemoryMethods { console.warn('Mem0 history is not a string, cannot prepend directly.') } - return await mapChatMessageToBaseMessage(chatMessage) + return await mapChatMessageToBaseMessage(chatMessage, this.orgId) } return returnIMessages diff --git a/packages/components/nodes/memory/MongoDBMemory/MongoDBMemory.ts b/packages/components/nodes/memory/MongoDBMemory/MongoDBMemory.ts index df70c4949..0fad704ef 100644 --- a/packages/components/nodes/memory/MongoDBMemory/MongoDBMemory.ts +++ b/packages/components/nodes/memory/MongoDBMemory/MongoDBMemory.ts @@ -88,9 +88,12 @@ const initializeMongoDB = async (nodeData: INodeData, options: ICommonObject): P const mongoDBConnectUrl = getCredentialParam('mongoDBConnectUrl', credentialData, nodeData) const driverInfo = { name: 'Flowise', version: (await getVersion()).version } + const orgId = options.orgId as string + 
return new BufferMemoryExtended({ memoryKey: memoryKey ?? 'chat_history', sessionId, + orgId, mongoConnection: { databaseName, collectionName, @@ -102,6 +105,7 @@ const initializeMongoDB = async (nodeData: INodeData, options: ICommonObject): P interface BufferMemoryExtendedInput { sessionId: string + orgId: string mongoConnection: { databaseName: string collectionName: string @@ -112,6 +116,7 @@ interface BufferMemoryExtendedInput { class BufferMemoryExtended extends FlowiseMemory implements MemoryMethods { sessionId = '' + orgId = '' mongoConnection: { databaseName: string collectionName: string @@ -122,6 +127,7 @@ class BufferMemoryExtended extends FlowiseMemory implements MemoryMethods { constructor(fields: BufferMemoryInput & BufferMemoryExtendedInput) { super(fields) this.sessionId = fields.sessionId + this.orgId = fields.orgId this.mongoConnection = fields.mongoConnection } @@ -138,7 +144,7 @@ class BufferMemoryExtended extends FlowiseMemory implements MemoryMethods { const messages = document?.messages || [] const baseMessages = messages.map(mapStoredMessageToChatMessage) if (prependMessages?.length) { - baseMessages.unshift(...(await mapChatMessageToBaseMessage(prependMessages))) + baseMessages.unshift(...(await mapChatMessageToBaseMessage(prependMessages, this.orgId))) } await client.close() diff --git a/packages/components/nodes/memory/RedisBackedChatMemory/RedisBackedChatMemory.ts b/packages/components/nodes/memory/RedisBackedChatMemory/RedisBackedChatMemory.ts index e1813fae7..757cb68f2 100644 --- a/packages/components/nodes/memory/RedisBackedChatMemory/RedisBackedChatMemory.ts +++ b/packages/components/nodes/memory/RedisBackedChatMemory/RedisBackedChatMemory.ts @@ -88,6 +88,7 @@ const initializeRedis = async (nodeData: INodeData, options: ICommonObject): Pro const credentialData = await getCredentialData(nodeData.credential ?? 
'', options) const redisUrl = getCredentialParam('redisUrl', credentialData, nodeData) + const orgId = options.orgId as string const redisOptions = redisUrl ? redisUrl @@ -104,7 +105,8 @@ const initializeRedis = async (nodeData: INodeData, options: ICommonObject): Pro sessionId, windowSize, sessionTTL, - redisOptions + redisOptions, + orgId }) return memory @@ -114,11 +116,13 @@ interface BufferMemoryExtendedInput { sessionId: string windowSize?: number sessionTTL?: number + orgId: string redisOptions: RedisOptions | string } class BufferMemoryExtended extends FlowiseMemory implements MemoryMethods { sessionId = '' + orgId = '' windowSize?: number sessionTTL?: number redisOptions: RedisOptions | string @@ -128,6 +132,7 @@ class BufferMemoryExtended extends FlowiseMemory implements MemoryMethods { this.sessionId = fields.sessionId this.windowSize = fields.windowSize this.sessionTTL = fields.sessionTTL + this.orgId = fields.orgId this.redisOptions = fields.redisOptions } @@ -165,7 +170,7 @@ class BufferMemoryExtended extends FlowiseMemory implements MemoryMethods { const orderedMessages = rawStoredMessages.reverse().map((message) => JSON.parse(message)) const baseMessages = orderedMessages.map(mapStoredMessageToChatMessage) if (prependMessages?.length) { - baseMessages.unshift(...(await mapChatMessageToBaseMessage(prependMessages))) + baseMessages.unshift(...(await mapChatMessageToBaseMessage(prependMessages, this.orgId))) } return returnBaseMessages ? 
baseMessages : convertBaseMessagetoIMessage(baseMessages) }) diff --git a/packages/components/nodes/memory/UpstashRedisBackedChatMemory/UpstashRedisBackedChatMemory.ts b/packages/components/nodes/memory/UpstashRedisBackedChatMemory/UpstashRedisBackedChatMemory.ts index 5c3d74dd5..3dd7ea502 100644 --- a/packages/components/nodes/memory/UpstashRedisBackedChatMemory/UpstashRedisBackedChatMemory.ts +++ b/packages/components/nodes/memory/UpstashRedisBackedChatMemory/UpstashRedisBackedChatMemory.ts @@ -100,13 +100,14 @@ const initalizeUpstashRedis = async (nodeData: INodeData, options: ICommonObject sessionTTL, client }) - + const orgId = options.orgId as string const memory = new BufferMemoryExtended({ memoryKey: memoryKey ?? 'chat_history', chatHistory: redisChatMessageHistory, sessionId, sessionTTL, - redisClient: client + redisClient: client, + orgId }) return memory @@ -115,11 +116,13 @@ const initalizeUpstashRedis = async (nodeData: INodeData, options: ICommonObject interface BufferMemoryExtendedInput { redisClient: Redis sessionId: string + orgId: string sessionTTL?: number } class BufferMemoryExtended extends FlowiseMemory implements MemoryMethods { sessionId = '' + orgId = '' redisClient: Redis sessionTTL?: number @@ -128,6 +131,7 @@ class BufferMemoryExtended extends FlowiseMemory implements MemoryMethods { this.sessionId = fields.sessionId this.redisClient = fields.redisClient this.sessionTTL = fields.sessionTTL + this.orgId = fields.orgId } async getChatMessages( @@ -143,7 +147,7 @@ class BufferMemoryExtended extends FlowiseMemory implements MemoryMethods { const previousMessages = orderedMessages.filter((x): x is StoredMessage => x.type !== undefined && x.data.content !== undefined) const baseMessages = previousMessages.map(mapStoredMessageToChatMessage) if (prependMessages?.length) { - baseMessages.unshift(...(await mapChatMessageToBaseMessage(prependMessages))) + baseMessages.unshift(...(await mapChatMessageToBaseMessage(prependMessages, this.orgId))) } 
return returnBaseMessages ? baseMessages : convertBaseMessagetoIMessage(baseMessages) } diff --git a/packages/components/nodes/memory/ZepMemory/ZepMemory.ts b/packages/components/nodes/memory/ZepMemory/ZepMemory.ts index 7832d0ff5..a49d3eb4e 100644 --- a/packages/components/nodes/memory/ZepMemory/ZepMemory.ts +++ b/packages/components/nodes/memory/ZepMemory/ZepMemory.ts @@ -119,6 +119,7 @@ const initializeZep = async (nodeData: INodeData, options: ICommonObject): Promi const credentialData = await getCredentialData(nodeData.credential ?? '', options) const apiKey = getCredentialParam('apiKey', credentialData, nodeData) + const orgId = options.orgId as string const obj: ZepMemoryInput & ZepMemoryExtendedInput = { baseURL, aiPrefix, @@ -127,6 +128,7 @@ const initializeZep = async (nodeData: INodeData, options: ICommonObject): Promi memoryKey, inputKey, sessionId, + orgId, k: k ? parseInt(k, 10) : undefined } if (apiKey) obj.apiKey = apiKey @@ -136,14 +138,17 @@ const initializeZep = async (nodeData: INodeData, options: ICommonObject): Promi interface ZepMemoryExtendedInput { k?: number + orgId: string } class ZepMemoryExtended extends ZepMemory implements MemoryMethods { lastN?: number + orgId = '' constructor(fields: ZepMemoryInput & ZepMemoryExtendedInput) { super(fields) this.lastN = fields.k + this.orgId = fields.orgId } async loadMemoryVariables(values: InputValues, overrideSessionId = ''): Promise { @@ -176,7 +181,7 @@ class ZepMemoryExtended extends ZepMemory implements MemoryMethods { const memoryVariables = await this.loadMemoryVariables({}, id) const baseMessages = memoryVariables[this.memoryKey] if (prependMessages?.length) { - baseMessages.unshift(...(await mapChatMessageToBaseMessage(prependMessages))) + baseMessages.unshift(...(await mapChatMessageToBaseMessage(prependMessages, this.orgId))) } return returnBaseMessages ? 
baseMessages : convertBaseMessagetoIMessage(baseMessages) } diff --git a/packages/components/nodes/memory/ZepMemoryCloud/ZepMemoryCloud.ts b/packages/components/nodes/memory/ZepMemoryCloud/ZepMemoryCloud.ts index 237e026c7..d6068b01e 100644 --- a/packages/components/nodes/memory/ZepMemoryCloud/ZepMemoryCloud.ts +++ b/packages/components/nodes/memory/ZepMemoryCloud/ZepMemoryCloud.ts @@ -113,6 +113,7 @@ const initializeZep = async (nodeData: INodeData, options: ICommonObject): Promi const credentialData = await getCredentialData(nodeData.credential ?? '', options) const apiKey = getCredentialParam('apiKey', credentialData, nodeData) + const orgId = options.orgId as string const obj: ZepMemoryInput & ZepMemoryExtendedInput = { apiKey, aiPrefix, @@ -121,7 +122,8 @@ const initializeZep = async (nodeData: INodeData, options: ICommonObject): Promi sessionId, inputKey, memoryType: memoryType, - returnMessages: true + returnMessages: true, + orgId } return new ZepMemoryExtended(obj) @@ -129,14 +131,17 @@ const initializeZep = async (nodeData: INodeData, options: ICommonObject): Promi interface ZepMemoryExtendedInput { memoryType?: 'perpetual' | 'message_window' + orgId: string } class ZepMemoryExtended extends ZepMemory implements MemoryMethods { memoryType: 'perpetual' | 'message_window' + orgId: string constructor(fields: ZepMemoryInput & ZepMemoryExtendedInput) { super(fields) this.memoryType = fields.memoryType ?? 'perpetual' + this.orgId = fields.orgId } async loadMemoryVariables(values: InputValues, overrideSessionId = ''): Promise { @@ -169,7 +174,7 @@ class ZepMemoryExtended extends ZepMemory implements MemoryMethods { const memoryVariables = await this.loadMemoryVariables({}, id) const baseMessages = memoryVariables[this.memoryKey] if (prependMessages?.length) { - baseMessages.unshift(...(await mapChatMessageToBaseMessage(prependMessages))) + baseMessages.unshift(...(await mapChatMessageToBaseMessage(prependMessages, this.orgId))) } return returnBaseMessages ? 
baseMessages : convertBaseMessagetoIMessage(baseMessages) } diff --git a/packages/components/nodes/multiagents/Supervisor/Supervisor.ts b/packages/components/nodes/multiagents/Supervisor/Supervisor.ts index 1cd78eae0..2babee9aa 100644 --- a/packages/components/nodes/multiagents/Supervisor/Supervisor.ts +++ b/packages/components/nodes/multiagents/Supervisor/Supervisor.ts @@ -19,8 +19,9 @@ import { AgentExecutor, JsonOutputToolsParser, ToolCallingAgentOutputParser } fr import { ChatMistralAI } from '@langchain/mistralai' import { ChatOpenAI } from '../../chatmodels/ChatOpenAI/FlowiseChatOpenAI' import { ChatAnthropic } from '../../chatmodels/ChatAnthropic/FlowiseChatAnthropic' -import { ChatGoogleGenerativeAI } from '../../chatmodels/ChatGoogleGenerativeAI/FlowiseChatGoogleGenerativeAI' import { addImagesToMessages, llmSupportsVision } from '../../../src/multiModalUtils' +import { ChatGoogleGenerativeAI } from '../../chatmodels/ChatGoogleGenerativeAI/FlowiseChatGoogleGenerativeAI' +import { AzureChatOpenAI } from '../../chatmodels/AzureChatOpenAI/FlowiseAzureChatOpenAI' const sysPrompt = `You are a supervisor tasked with managing a conversation between the following workers: {team_members}. Given the following user request, respond with the worker to act next. 
@@ -242,7 +243,7 @@ class Supervisor_MultiAgents implements INode { } } }) - } else if (llm instanceof ChatOpenAI) { + } else if (llm instanceof ChatOpenAI || llm instanceof AzureChatOpenAI) { let prompt = ChatPromptTemplate.fromMessages([ ['system', systemPrompt], new MessagesPlaceholder('messages'), diff --git a/packages/components/nodes/multiagents/Worker/Worker.ts b/packages/components/nodes/multiagents/Worker/Worker.ts index 5651135a1..06a5d059c 100644 --- a/packages/components/nodes/multiagents/Worker/Worker.ts +++ b/packages/components/nodes/multiagents/Worker/Worker.ts @@ -233,7 +233,7 @@ async function createAgent( sessionId: flowObj?.sessionId, chatId: flowObj?.chatId, input: flowObj?.input, - verbose: process.env.DEBUG === 'true', + verbose: process.env.DEBUG === 'true' ? true : false, maxIterations: maxIterations ? parseFloat(maxIterations) : undefined }) return executor diff --git a/packages/components/nodes/outputparsers/StructuredOutputParserAdvanced/StructuredOutputParserAdvanced.ts b/packages/components/nodes/outputparsers/StructuredOutputParserAdvanced/StructuredOutputParserAdvanced.ts index e9f559641..cc8a8ee97 100644 --- a/packages/components/nodes/outputparsers/StructuredOutputParserAdvanced/StructuredOutputParserAdvanced.ts +++ b/packages/components/nodes/outputparsers/StructuredOutputParserAdvanced/StructuredOutputParserAdvanced.ts @@ -2,8 +2,8 @@ import { getBaseClasses, INode, INodeData, INodeParams } from '../../../src' import { BaseOutputParser } from '@langchain/core/output_parsers' import { StructuredOutputParser as LangchainStructuredOutputParser } from 'langchain/output_parsers' import { CATEGORY } from '../OutputParserHelpers' -import { z } from 'zod' import { jsonrepair } from 'jsonrepair' +import { SecureZodSchemaParser } from '../../../src/secureZodParser' class AdvancedStructuredOutputParser implements INode { label: string @@ -57,10 +57,8 @@ class AdvancedStructuredOutputParser implements INode { const schemaString = 
nodeData.inputs?.exampleJson as string const autoFix = nodeData.inputs?.autofixParser as boolean - const zodSchemaFunction = new Function('z', `return ${schemaString}`) - const zodSchema = zodSchemaFunction(z) - try { + const zodSchema = SecureZodSchemaParser.parseZodSchema(schemaString) const structuredOutputParser = LangchainStructuredOutputParser.fromZodSchema(zodSchema) const baseParse = structuredOutputParser.parse diff --git a/packages/components/nodes/prompts/ChatPromptTemplate/ChatPromptTemplate.ts b/packages/components/nodes/prompts/ChatPromptTemplate/ChatPromptTemplate.ts index d3faf7127..9e562962a 100644 --- a/packages/components/nodes/prompts/ChatPromptTemplate/ChatPromptTemplate.ts +++ b/packages/components/nodes/prompts/ChatPromptTemplate/ChatPromptTemplate.ts @@ -1,7 +1,6 @@ import { ICommonObject, IDatabaseEntity, INode, INodeData, INodeParams } from '../../../src/Interface' -import { getBaseClasses, transformBracesWithColon } from '../../../src/utils' +import { getBaseClasses, transformBracesWithColon, getVars, executeJavaScriptCode, createCodeExecutionSandbox } from '../../../src/utils' import { ChatPromptTemplate, SystemMessagePromptTemplate, HumanMessagePromptTemplate } from '@langchain/core/prompts' -import { getVM } from '../../sequentialagents/commonUtils' import { DataSource } from 'typeorm' const defaultFunc = `const { AIMessage, HumanMessage, ToolMessage } = require('@langchain/core/messages'); @@ -12,7 +11,7 @@ return [ tool_calls: [ { id: "12345", - name: "calulator", + name: "calculator", args: { number1: 333382, number2: 1932, @@ -120,13 +119,28 @@ class ChatPromptTemplate_Prompts implements INode { ) { const appDataSource = options.appDataSource as DataSource const databaseEntities = options.databaseEntities as IDatabaseEntity - const vm = await getVM(appDataSource, databaseEntities, nodeData, {}) + const variables = await getVars(appDataSource, databaseEntities, nodeData, options) + const flow = { + chatflowId: options.chatflowid, + 
sessionId: options.sessionId, + chatId: options.chatId + } + + const sandbox = createCodeExecutionSandbox('', variables, flow) + try { - const response = await vm.run(`module.exports = async function() {${messageHistoryCode}}()`, __dirname) - if (!Array.isArray(response)) throw new Error('Returned message history must be an array') + const response = await executeJavaScriptCode(messageHistoryCode, sandbox, { + libraries: ['axios', '@langchain/core'] + }) + + const parsedResponse = JSON.parse(response) + + if (!Array.isArray(parsedResponse)) { + throw new Error('Returned message history must be an array') + } prompt = ChatPromptTemplate.fromMessages([ SystemMessagePromptTemplate.fromTemplate(systemMessagePrompt), - ...response, + ...parsedResponse, HumanMessagePromptTemplate.fromTemplate(humanMessagePrompt) ]) } catch (e) { diff --git a/packages/components/nodes/recordmanager/MySQLRecordManager/MySQLrecordManager.ts b/packages/components/nodes/recordmanager/MySQLRecordManager/MySQLrecordManager.ts index a1571b028..eb38144e7 100644 --- a/packages/components/nodes/recordmanager/MySQLRecordManager/MySQLrecordManager.ts +++ b/packages/components/nodes/recordmanager/MySQLRecordManager/MySQLrecordManager.ts @@ -205,8 +205,8 @@ class MySQLRecordManager implements RecordManagerInterface { } async createSchema(): Promise { + const dataSource = await this.getDataSource() try { - const dataSource = await this.getDataSource() const queryRunner = dataSource.createQueryRunner() const tableName = this.sanitizeTableName(this.tableName) @@ -241,6 +241,8 @@ class MySQLRecordManager implements RecordManagerInterface { return } throw e + } finally { + await dataSource.destroy() } } diff --git a/packages/components/nodes/recordmanager/PostgresRecordManager/PostgresRecordManager.ts b/packages/components/nodes/recordmanager/PostgresRecordManager/PostgresRecordManager.ts index ebb429af0..ab4b564e8 100644 --- 
a/packages/components/nodes/recordmanager/PostgresRecordManager/PostgresRecordManager.ts +++ b/packages/components/nodes/recordmanager/PostgresRecordManager/PostgresRecordManager.ts @@ -222,13 +222,11 @@ class PostgresRecordManager implements RecordManagerInterface { } async createSchema(): Promise { + const dataSource = await this.getDataSource() try { - const dataSource = await this.getDataSource() const queryRunner = dataSource.createQueryRunner() const tableName = this.sanitizeTableName(this.tableName) - await queryRunner.query('CREATE EXTENSION IF NOT EXISTS pgcrypto;') - await queryRunner.manager.query(` CREATE TABLE IF NOT EXISTS "${tableName}" ( uuid UUID PRIMARY KEY DEFAULT gen_random_uuid(), @@ -253,6 +251,8 @@ class PostgresRecordManager implements RecordManagerInterface { return } throw e + } finally { + await dataSource.destroy() } } @@ -260,9 +260,9 @@ class PostgresRecordManager implements RecordManagerInterface { const dataSource = await this.getDataSource() try { const queryRunner = dataSource.createQueryRunner() - const res = await queryRunner.manager.query('SELECT EXTRACT(EPOCH FROM CURRENT_TIMESTAMP) AS now') + const res = await queryRunner.manager.query('SELECT EXTRACT(EPOCH FROM CURRENT_TIMESTAMP) AS extract') await queryRunner.release() - return Number.parseFloat(res[0].now) + return Number.parseFloat(res[0].extract) } catch (error) { console.error('Error getting time in PostgresRecordManager:') throw error diff --git a/packages/components/nodes/recordmanager/SQLiteRecordManager/SQLiteRecordManager.ts b/packages/components/nodes/recordmanager/SQLiteRecordManager/SQLiteRecordManager.ts index c209f4956..4b7376041 100644 --- a/packages/components/nodes/recordmanager/SQLiteRecordManager/SQLiteRecordManager.ts +++ b/packages/components/nodes/recordmanager/SQLiteRecordManager/SQLiteRecordManager.ts @@ -179,8 +179,8 @@ class SQLiteRecordManager implements RecordManagerInterface { } async createSchema(): Promise { + const dataSource = await 
this.getDataSource() try { - const dataSource = await this.getDataSource() const queryRunner = dataSource.createQueryRunner() const tableName = this.sanitizeTableName(this.tableName) @@ -208,6 +208,8 @@ CREATE INDEX IF NOT EXISTS group_id_index ON "${tableName}" (group_id);`) return } throw e + } finally { + await dataSource.destroy() } } diff --git a/packages/components/nodes/retrievers/ExtractMetadataRetriever/ExtractMetadataRetriever.ts b/packages/components/nodes/retrievers/ExtractMetadataRetriever/ExtractMetadataRetriever.ts index 481684454..1e6205f62 100644 --- a/packages/components/nodes/retrievers/ExtractMetadataRetriever/ExtractMetadataRetriever.ts +++ b/packages/components/nodes/retrievers/ExtractMetadataRetriever/ExtractMetadataRetriever.ts @@ -3,8 +3,7 @@ import { VectorStore, VectorStoreRetriever, VectorStoreRetrieverInput } from '@l import { INode, INodeData, INodeParams, INodeOutputsValue } from '../../../src/Interface' import { handleEscapeCharacters } from '../../../src' import { z } from 'zod' -import { convertStructuredSchemaToZod, ExtractTool } from '../../sequentialagents/commonUtils' -import { ChatGoogleGenerativeAI } from '@langchain/google-genai' +import { convertStructuredSchemaToZod } from '../../sequentialagents/commonUtils' const queryPrefix = 'query' const defaultPrompt = `Extract keywords from the query: {{${queryPrefix}}}` @@ -31,7 +30,6 @@ class ExtractMetadataRetriever_Retrievers implements INode { this.category = 'Retrievers' this.description = 'Extract keywords/metadata from the query and use it to filter documents' this.baseClasses = [this.type, 'BaseRetriever'] - this.badge = 'BETA' this.inputs = [ { label: 'Vector Store', @@ -127,19 +125,8 @@ class ExtractMetadataRetriever_Retrievers implements INode { try { const structuredOutput = z.object(convertStructuredSchemaToZod(llmStructuredOutput)) - if (llm instanceof ChatGoogleGenerativeAI) { - const tool = new ExtractTool({ - schema: structuredOutput - }) - // @ts-ignore - const 
modelWithTool = llm.bind({ - tools: [tool] - }) as any - llm = modelWithTool - } else { - // @ts-ignore - llm = llm.withStructuredOutput(structuredOutput) - } + // @ts-ignore + llm = llm.withStructuredOutput(structuredOutput) } catch (exception) { console.error(exception) } diff --git a/packages/components/nodes/sequentialagents/Agent/Agent.ts b/packages/components/nodes/sequentialagents/Agent/Agent.ts index ad6262074..0933731f1 100644 --- a/packages/components/nodes/sequentialagents/Agent/Agent.ts +++ b/packages/components/nodes/sequentialagents/Agent/Agent.ts @@ -22,7 +22,13 @@ import { IStateWithMessages, ConversationHistorySelection } from '../../../src/Interface' -import { ToolCallingAgentOutputParser, AgentExecutor, SOURCE_DOCUMENTS_PREFIX, ARTIFACTS_PREFIX } from '../../../src/agents' +import { + ToolCallingAgentOutputParser, + AgentExecutor, + SOURCE_DOCUMENTS_PREFIX, + ARTIFACTS_PREFIX, + TOOL_ARGS_PREFIX +} from '../../../src/agents' import { extractOutputFromArray, getInputVariables, @@ -30,11 +36,12 @@ import { handleEscapeCharacters, prepareSandboxVars, removeInvalidImageMarkdown, - transformBracesWithColon + transformBracesWithColon, + executeJavaScriptCode, + createCodeExecutionSandbox } from '../../../src/utils' import { customGet, - getVM, processImageMessage, transformObjectPropertyToFunction, filterConversationHistory, @@ -680,7 +687,7 @@ async function createAgent( sessionId: flowObj?.sessionId, chatId: flowObj?.chatId, input: flowObj?.input, - verbose: process.env.DEBUG === 'true', + verbose: process.env.DEBUG === 'true' ? true : false, maxIterations: maxIterations ? parseFloat(maxIterations) : undefined }) return executor @@ -877,7 +884,7 @@ const getReturnOutput = async (nodeData: INodeData, input: string, options: ICom const updateStateMemory = nodeData.inputs?.updateStateMemory as string const selectedTab = tabIdentifier ? 
tabIdentifier.split(`_${nodeData.id}`)[0] : 'updateStateMemoryUI' - const variables = await getVars(appDataSource, databaseEntities, nodeData) + const variables = await getVars(appDataSource, databaseEntities, nodeData, options) const flow = { chatflowId: options.chatflowid, @@ -930,9 +937,11 @@ const getReturnOutput = async (nodeData: INodeData, input: string, options: ICom throw new Error(e) } } else if (selectedTab === 'updateStateMemoryCode' && updateStateMemoryCode) { - const vm = await getVM(appDataSource, databaseEntities, nodeData, flow) + const sandbox = createCodeExecutionSandbox(input, variables, flow) + try { - const response = await vm.run(`module.exports = async function() {${updateStateMemoryCode}}()`, __dirname) + const response = await executeJavaScriptCode(updateStateMemoryCode, sandbox) + if (typeof response !== 'object') throw new Error('Return output must be an object') return response } catch (e) { @@ -1041,6 +1050,17 @@ class ToolNode extends RunnableCallable } } + let toolInput + if (typeof output === 'string' && output.includes(TOOL_ARGS_PREFIX)) { + const outputArray = output.split(TOOL_ARGS_PREFIX) + output = outputArray[0] + try { + toolInput = JSON.parse(outputArray[1]) + } catch (e) { + console.error('Error parsing tool input from tool') + } + } + return new ToolMessage({ name: tool.name, content: typeof output === 'string' ? output : JSON.stringify(output), @@ -1048,11 +1068,11 @@ class ToolNode extends RunnableCallable additional_kwargs: { sourceDocuments, artifacts, - args: call.args, + args: toolInput ?? call.args, usedTools: [ { tool: tool.name ?? '', - toolInput: call.args, + toolInput: toolInput ?? 
call.args, toolOutput: output } ] diff --git a/packages/components/nodes/sequentialagents/Condition/Condition.ts b/packages/components/nodes/sequentialagents/Condition/Condition.ts index 1455c17b1..8cac63f61 100644 --- a/packages/components/nodes/sequentialagents/Condition/Condition.ts +++ b/packages/components/nodes/sequentialagents/Condition/Condition.ts @@ -10,8 +10,8 @@ import { ISeqAgentNode, ISeqAgentsState } from '../../../src/Interface' -import { checkCondition, customGet, getVM } from '../commonUtils' -import { getVars, prepareSandboxVars } from '../../../src/utils' +import { checkCondition, customGet } from '../commonUtils' +import { getVars, prepareSandboxVars, executeJavaScriptCode, createCodeExecutionSandbox } from '../../../src/utils' const howToUseCode = ` 1. Must return a string value at the end of function. For example: @@ -267,7 +267,7 @@ const runCondition = async (nodeData: INodeData, input: string, options: ICommon const tabIdentifier = nodeData.inputs?.[`${TAB_IDENTIFIER}_${nodeData.id}`] as string const selectedTab = tabIdentifier ? 
tabIdentifier.split(`_${nodeData.id}`)[0] : 'conditionUI' - const variables = await getVars(appDataSource, databaseEntities, nodeData) + const variables = await getVars(appDataSource, databaseEntities, nodeData, options) const flow = { chatflowId: options.chatflowid, @@ -279,9 +279,11 @@ const runCondition = async (nodeData: INodeData, input: string, options: ICommon } if (selectedTab === 'conditionFunction' && conditionFunction) { - const vm = await getVM(appDataSource, databaseEntities, nodeData, flow) + const sandbox = createCodeExecutionSandbox(input, variables, flow) + try { - const response = await vm.run(`module.exports = async function() {${conditionFunction}}()`, __dirname) + const response = await executeJavaScriptCode(conditionFunction, sandbox) + if (typeof response !== 'string') throw new Error('Condition function must return a string') return response } catch (e) { diff --git a/packages/components/nodes/sequentialagents/ConditionAgent/ConditionAgent.ts b/packages/components/nodes/sequentialagents/ConditionAgent/ConditionAgent.ts index ae56efe12..fada09305 100644 --- a/packages/components/nodes/sequentialagents/ConditionAgent/ConditionAgent.ts +++ b/packages/components/nodes/sequentialagents/ConditionAgent/ConditionAgent.ts @@ -16,18 +16,23 @@ import { ISeqAgentNode, ISeqAgentsState } from '../../../src/Interface' -import { getInputVariables, getVars, handleEscapeCharacters, prepareSandboxVars, transformBracesWithColon } from '../../../src/utils' import { - ExtractTool, + getInputVariables, + getVars, + handleEscapeCharacters, + prepareSandboxVars, + transformBracesWithColon, + executeJavaScriptCode, + createCodeExecutionSandbox +} from '../../../src/utils' +import { checkCondition, convertStructuredSchemaToZod, customGet, - getVM, transformObjectPropertyToFunction, filterConversationHistory, restructureMessages } from '../commonUtils' -import { ChatGoogleGenerativeAI } from '../../chatmodels/ChatGoogleGenerativeAI/FlowiseChatGoogleGenerativeAI' 
interface IConditionGridItem { variable: string @@ -485,20 +490,8 @@ const runCondition = async ( try { const structuredOutput = z.object(convertStructuredSchemaToZod(conditionAgentStructuredOutput)) - if (llm instanceof ChatGoogleGenerativeAI) { - const tool = new ExtractTool({ - schema: structuredOutput - }) - // @ts-ignore - const modelWithTool = llm.bind({ - tools: [tool], - signal: abortControllerSignal ? abortControllerSignal.signal : undefined - }) - model = modelWithTool - } else { - // @ts-ignore - model = llm.withStructuredOutput(structuredOutput) - } + // @ts-ignore + model = llm.withStructuredOutput(structuredOutput) } catch (exception) { console.error('Invalid JSON in Condition Agent Structured Output: ' + exception) model = llm @@ -540,7 +533,7 @@ const runCondition = async ( result = { ...jsonResult, additional_kwargs: { nodeId: nodeData.id } } } - const variables = await getVars(appDataSource, databaseEntities, nodeData) + const variables = await getVars(appDataSource, databaseEntities, nodeData, options) const flow = { chatflowId: options.chatflowid, @@ -553,9 +546,11 @@ const runCondition = async ( } if (selectedTab === 'conditionFunction' && conditionFunction) { - const vm = await getVM(appDataSource, databaseEntities, nodeData, flow) + const sandbox = createCodeExecutionSandbox(input, variables, flow) + try { - const response = await vm.run(`module.exports = async function() {${conditionFunction}}()`, __dirname) + const response = await executeJavaScriptCode(conditionFunction, sandbox) + if (typeof response !== 'string') throw new Error('Condition function must return a string') return response } catch (e) { diff --git a/packages/components/nodes/sequentialagents/CustomFunction/CustomFunction.ts b/packages/components/nodes/sequentialagents/CustomFunction/CustomFunction.ts index b7d831e17..c86edf0aa 100644 --- a/packages/components/nodes/sequentialagents/CustomFunction/CustomFunction.ts +++ 
b/packages/components/nodes/sequentialagents/CustomFunction/CustomFunction.ts @@ -1,6 +1,5 @@ -import { NodeVM } from '@flowiseai/nodevm' import { DataSource } from 'typeorm' -import { availableDependencies, defaultAllowBuiltInDep, getVars, handleEscapeCharacters, prepareSandboxVars } from '../../../src/utils' +import { getVars, handleEscapeCharacters, executeJavaScriptCode, createCodeExecutionSandbox } from '../../../src/utils' import { ICommonObject, IDatabaseEntity, INode, INodeData, INodeParams, ISeqAgentNode, ISeqAgentsState } from '../../../src/Interface' import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages' import { customGet } from '../commonUtils' @@ -102,7 +101,7 @@ class CustomFunction_SeqAgents implements INode { if (!sequentialNodes || !sequentialNodes.length) throw new Error('Custom function must have a predecessor!') const executeFunc = async (state: ISeqAgentsState) => { - const variables = await getVars(appDataSource, databaseEntities, nodeData) + const variables = await getVars(appDataSource, databaseEntities, nodeData, options) const flow = { chatflowId: options.chatflowid, sessionId: options.sessionId, @@ -154,44 +153,20 @@ class CustomFunction_SeqAgents implements INode { } } - let sandbox: any = { - $input: input, - util: undefined, - Symbol: undefined, - child_process: undefined, - fs: undefined, - process: undefined - } - sandbox['$vars'] = prepareSandboxVars(variables) - sandbox['$flow'] = flow + // Create additional sandbox variables + const additionalSandbox: ICommonObject = {} + // Add input variables to sandbox if (Object.keys(inputVars).length) { for (const item in inputVars) { - sandbox[`$${item}`] = inputVars[item] + additionalSandbox[`$${item}`] = inputVars[item] } } - const builtinDeps = process.env.TOOL_FUNCTION_BUILTIN_DEP - ? defaultAllowBuiltInDep.concat(process.env.TOOL_FUNCTION_BUILTIN_DEP.split(',')) - : defaultAllowBuiltInDep - const externalDeps = process.env.TOOL_FUNCTION_EXTERNAL_DEP ? 
process.env.TOOL_FUNCTION_EXTERNAL_DEP.split(',') : [] - const deps = availableDependencies.concat(externalDeps) + const sandbox = createCodeExecutionSandbox(input, variables, flow, additionalSandbox) - const nodeVMOptions = { - console: 'inherit', - sandbox, - require: { - external: { modules: deps }, - builtin: builtinDeps - }, - eval: false, - wasm: false, - timeout: 10000 - } as any - - const vm = new NodeVM(nodeVMOptions) try { - const response = await vm.run(`module.exports = async function() {${javascriptFunction}}()`, __dirname) + const response = await executeJavaScriptCode(javascriptFunction, sandbox) if (returnValueAs === 'stateObj') { if (typeof response !== 'object') { diff --git a/packages/components/nodes/sequentialagents/ExecuteFlow/ExecuteFlow.ts b/packages/components/nodes/sequentialagents/ExecuteFlow/ExecuteFlow.ts index 479ebfbb4..c1bc2fbca 100644 --- a/packages/components/nodes/sequentialagents/ExecuteFlow/ExecuteFlow.ts +++ b/packages/components/nodes/sequentialagents/ExecuteFlow/ExecuteFlow.ts @@ -1,13 +1,6 @@ -import { NodeVM } from '@flowiseai/nodevm' import { DataSource } from 'typeorm' -import { - availableDependencies, - defaultAllowBuiltInDep, - getCredentialData, - getCredentialParam, - getVars, - prepareSandboxVars -} from '../../../src/utils' +import { getCredentialData, getCredentialParam, getVars, executeJavaScriptCode, createCodeExecutionSandbox } from '../../../src/utils' +import { isValidUUID, isValidURL } from '../../../src/validator' import { ICommonObject, IDatabaseEntity, @@ -141,7 +134,8 @@ class ExecuteFlow_SeqAgents implements INode { return returnData } - const chatflows = await appDataSource.getRepository(databaseEntities['ChatFlow']).find() + const searchOptions = options.searchOptions || {} + const chatflows = await appDataSource.getRepository(databaseEntities['ChatFlow']).findBy(searchOptions) for (let i = 0; i < chatflows.length; i += 1) { const data = { @@ -176,6 +170,16 @@ class ExecuteFlow_SeqAgents implements 
INode { const baseURL = (nodeData.inputs?.baseURL as string) || (options.baseURL as string) const returnValueAs = nodeData.inputs?.returnValueAs as string + // Validate selectedFlowId is a valid UUID + if (!selectedFlowId || !isValidUUID(selectedFlowId)) { + throw new Error('Invalid flow ID: must be a valid UUID') + } + + // Validate baseURL is a valid URL + if (!baseURL || !isValidURL(baseURL)) { + throw new Error('Invalid base URL: must be a valid URL') + } + const credentialData = await getCredentialData(nodeData.credential ?? '', options) const chatflowApiKey = getCredentialParam('chatflowApiKey', credentialData, nodeData) @@ -189,7 +193,7 @@ class ExecuteFlow_SeqAgents implements INode { const chatId = options.chatId const executeFunc = async (state: ISeqAgentsState) => { - const variables = await getVars(appDataSource, databaseEntities, nodeData) + const variables = await getVars(appDataSource, databaseEntities, nodeData, options) let flowInput = '' if (seqExecuteFlowInput === 'userQuestion') { @@ -223,7 +227,7 @@ class ExecuteFlow_SeqAgents implements INode { } } - const options = { + const callOptions = { method: 'POST', headers: { 'Content-Type': 'application/json', @@ -232,18 +236,13 @@ class ExecuteFlow_SeqAgents implements INode { body: JSON.stringify(body) } - let sandbox: ICommonObject = { - $input: flowInput, - $callOptions: options, - $callBody: body, - util: undefined, - Symbol: undefined, - child_process: undefined, - fs: undefined, - process: undefined + // Create additional sandbox variables + const additionalSandbox: ICommonObject = { + $callOptions: callOptions, + $callBody: body } - sandbox['$vars'] = prepareSandboxVars(variables) - sandbox['$flow'] = flow + + const sandbox = createCodeExecutionSandbox(flowInput, variables, flow, additionalSandbox) const code = ` const fetch = require('node-fetch'); @@ -263,27 +262,10 @@ class ExecuteFlow_SeqAgents implements INode { } ` - const builtinDeps = process.env.TOOL_FUNCTION_BUILTIN_DEP - ? 
defaultAllowBuiltInDep.concat(process.env.TOOL_FUNCTION_BUILTIN_DEP.split(',')) - : defaultAllowBuiltInDep - const externalDeps = process.env.TOOL_FUNCTION_EXTERNAL_DEP ? process.env.TOOL_FUNCTION_EXTERNAL_DEP.split(',') : [] - const deps = availableDependencies.concat(externalDeps) - - const nodeVMOptions = { - console: 'inherit', - sandbox, - require: { - external: { modules: deps }, - builtin: builtinDeps - }, - eval: false, - wasm: false, - timeout: 10000 - } as any - - const vm = new NodeVM(nodeVMOptions) try { - let response = await vm.run(`module.exports = async function() {${code}}()`, __dirname) + let response = await executeJavaScriptCode(code, sandbox, { + useSandbox: false + }) if (typeof response === 'object') { response = JSON.stringify(response) diff --git a/packages/components/nodes/sequentialagents/LLMNode/LLMNode.ts b/packages/components/nodes/sequentialagents/LLMNode/LLMNode.ts index 371a8986f..e0b577fb2 100644 --- a/packages/components/nodes/sequentialagents/LLMNode/LLMNode.ts +++ b/packages/components/nodes/sequentialagents/LLMNode/LLMNode.ts @@ -24,20 +24,19 @@ import { getVars, handleEscapeCharacters, prepareSandboxVars, - transformBracesWithColon + transformBracesWithColon, + executeJavaScriptCode, + createCodeExecutionSandbox } from '../../../src/utils' import { - ExtractTool, convertStructuredSchemaToZod, customGet, - getVM, processImageMessage, transformObjectPropertyToFunction, filterConversationHistory, restructureMessages, checkMessageHistory } from '../commonUtils' -import { ChatGoogleGenerativeAI } from '../../chatmodels/ChatGoogleGenerativeAI/FlowiseChatGoogleGenerativeAI' const TAB_IDENTIFIER = 'selectedUpdateStateMemoryTab' const customOutputFuncDesc = `This is only applicable when you have a custom State at the START node. 
After agent execution, you might want to update the State values` @@ -513,19 +512,8 @@ async function createAgent( try { const structuredOutput = z.object(convertStructuredSchemaToZod(llmStructuredOutput)) - if (llm instanceof ChatGoogleGenerativeAI) { - const tool = new ExtractTool({ - schema: structuredOutput - }) - // @ts-ignore - const modelWithTool = llm.bind({ - tools: [tool] - }) as any - llm = modelWithTool - } else { - // @ts-ignore - llm = llm.withStructuredOutput(structuredOutput) - } + // @ts-ignore + llm = llm.withStructuredOutput(structuredOutput) } catch (exception) { console.error(exception) } @@ -668,7 +656,7 @@ const getReturnOutput = async (nodeData: INodeData, input: string, options: ICom const updateStateMemory = nodeData.inputs?.updateStateMemory as string const selectedTab = tabIdentifier ? tabIdentifier.split(`_${nodeData.id}`)[0] : 'updateStateMemoryUI' - const variables = await getVars(appDataSource, databaseEntities, nodeData) + const variables = await getVars(appDataSource, databaseEntities, nodeData, options) const flow = { chatflowId: options.chatflowid, @@ -721,9 +709,11 @@ const getReturnOutput = async (nodeData: INodeData, input: string, options: ICom throw new Error(e) } } else if (selectedTab === 'updateStateMemoryCode' && updateStateMemoryCode) { - const vm = await getVM(appDataSource, databaseEntities, nodeData, flow) + const sandbox = createCodeExecutionSandbox(input, variables, flow) + try { - const response = await vm.run(`module.exports = async function() {${updateStateMemoryCode}}()`, __dirname) + const response = await executeJavaScriptCode(updateStateMemoryCode, sandbox) + if (typeof response !== 'object') throw new Error('Return output must be an object') return response } catch (e) { diff --git a/packages/components/nodes/sequentialagents/State/State.ts b/packages/components/nodes/sequentialagents/State/State.ts index 5c48cb7a7..1d22d7d84 100644 --- a/packages/components/nodes/sequentialagents/State/State.ts +++ 
b/packages/components/nodes/sequentialagents/State/State.ts @@ -1,13 +1,16 @@ import { START } from '@langchain/langgraph' -import { NodeVM } from '@flowiseai/nodevm' import { DataSource } from 'typeorm' import { ICommonObject, IDatabaseEntity, INode, INodeData, INodeParams, ISeqAgentNode } from '../../../src/Interface' -import { availableDependencies, defaultAllowBuiltInDep, getVars, prepareSandboxVars } from '../../../src/utils' +import { getVars, executeJavaScriptCode, createCodeExecutionSandbox } from '../../../src/utils' const defaultFunc = `{ aggregate: { value: (x, y) => x.concat(y), // here we append the new message to the existing messages default: () => [] + }, + replacedValue: { + value: (x, y) => y ?? x, + default: () => null } }` @@ -190,7 +193,7 @@ class State_SeqAgents implements INode { throw new Error(e) } } else if (selectedTab === 'stateMemoryCode' && stateMemoryCode) { - const variables = await getVars(appDataSource, databaseEntities, nodeData) + const variables = await getVars(appDataSource, databaseEntities, nodeData, options) const flow = { chatflowId: options.chatflowid, sessionId: options.sessionId, @@ -198,37 +201,11 @@ class State_SeqAgents implements INode { input } - let sandbox: any = { - util: undefined, - Symbol: undefined, - child_process: undefined, - fs: undefined, - process: undefined - } - sandbox['$vars'] = prepareSandboxVars(variables) - sandbox['$flow'] = flow + const sandbox = createCodeExecutionSandbox('', variables, flow) - const builtinDeps = process.env.TOOL_FUNCTION_BUILTIN_DEP - ? defaultAllowBuiltInDep.concat(process.env.TOOL_FUNCTION_BUILTIN_DEP.split(',')) - : defaultAllowBuiltInDep - const externalDeps = process.env.TOOL_FUNCTION_EXTERNAL_DEP ? 
process.env.TOOL_FUNCTION_EXTERNAL_DEP.split(',') : [] - const deps = availableDependencies.concat(externalDeps) - - const nodeVMOptions = { - console: 'inherit', - sandbox, - require: { - external: { modules: deps }, - builtin: builtinDeps - }, - eval: false, - wasm: false, - timeout: 10000 - } as any - - const vm = new NodeVM(nodeVMOptions) try { - const response = await vm.run(`module.exports = async function() {return ${stateMemoryCode}}()`, __dirname) + const response = await executeJavaScriptCode(`return ${stateMemoryCode}`, sandbox) + if (typeof response !== 'object') throw new Error('State must be an object') const returnOutput: ISeqAgentNode = { id: nodeData.id, diff --git a/packages/components/nodes/sequentialagents/ToolNode/ToolNode.ts b/packages/components/nodes/sequentialagents/ToolNode/ToolNode.ts index fda82aec9..3475a2772 100644 --- a/packages/components/nodes/sequentialagents/ToolNode/ToolNode.ts +++ b/packages/components/nodes/sequentialagents/ToolNode/ToolNode.ts @@ -12,11 +12,11 @@ import { import { AIMessage, AIMessageChunk, BaseMessage, ToolMessage } from '@langchain/core/messages' import { StructuredTool } from '@langchain/core/tools' import { RunnableConfig } from '@langchain/core/runnables' -import { ARTIFACTS_PREFIX, SOURCE_DOCUMENTS_PREFIX } from '../../../src/agents' +import { ARTIFACTS_PREFIX, SOURCE_DOCUMENTS_PREFIX, TOOL_ARGS_PREFIX } from '../../../src/agents' import { Document } from '@langchain/core/documents' import { DataSource } from 'typeorm' -import { MessagesState, RunnableCallable, customGet, getVM } from '../commonUtils' -import { getVars, prepareSandboxVars } from '../../../src/utils' +import { MessagesState, RunnableCallable, customGet } from '../commonUtils' +import { getVars, prepareSandboxVars, executeJavaScriptCode, createCodeExecutionSandbox } from '../../../src/utils' import { ChatPromptTemplate } from '@langchain/core/prompts' const defaultApprovalPrompt = `You are about to execute tool: {tools}. 
Ask if user want to proceed` @@ -448,6 +448,17 @@ class ToolNode ext } } + let toolInput + if (typeof output === 'string' && output.includes(TOOL_ARGS_PREFIX)) { + const outputArray = output.split(TOOL_ARGS_PREFIX) + output = outputArray[0] + try { + toolInput = JSON.parse(outputArray[1]) + } catch (e) { + console.error('Error parsing tool input from tool') + } + } + return new ToolMessage({ name: tool.name, content: typeof output === 'string' ? output : JSON.stringify(output), @@ -455,11 +466,11 @@ class ToolNode ext additional_kwargs: { sourceDocuments, artifacts, - args: call.args, + args: toolInput ?? call.args, usedTools: [ { tool: tool.name ?? '', - toolInput: call.args, + toolInput: toolInput ?? call.args, toolOutput: output } ] @@ -498,7 +509,7 @@ const getReturnOutput = async ( const updateStateMemory = nodeData.inputs?.updateStateMemory as string const selectedTab = tabIdentifier ? tabIdentifier.split(`_${nodeData.id}`)[0] : 'updateStateMemoryUI' - const variables = await getVars(appDataSource, databaseEntities, nodeData) + const variables = await getVars(appDataSource, databaseEntities, nodeData, options) const reformattedOutput = outputs.map((output) => { return { @@ -561,9 +572,11 @@ const getReturnOutput = async ( throw new Error(e) } } else if (selectedTab === 'updateStateMemoryCode' && updateStateMemoryCode) { - const vm = await getVM(appDataSource, databaseEntities, nodeData, flow) + const sandbox = createCodeExecutionSandbox(input, variables, flow) + try { - const response = await vm.run(`module.exports = async function() {${updateStateMemoryCode}}()`, __dirname) + const response = await executeJavaScriptCode(updateStateMemoryCode, sandbox) + if (typeof response !== 'object') throw new Error('Return output must be an object') return response } catch (e) { diff --git a/packages/components/nodes/sequentialagents/commonUtils.ts b/packages/components/nodes/sequentialagents/commonUtils.ts index 3fe298f7f..e970a64ea 100644 --- 
a/packages/components/nodes/sequentialagents/commonUtils.ts +++ b/packages/components/nodes/sequentialagents/commonUtils.ts @@ -1,7 +1,6 @@ import { get } from 'lodash' import { z } from 'zod' import { DataSource } from 'typeorm' -import { NodeVM } from '@flowiseai/nodevm' import { StructuredTool } from '@langchain/core/tools' import { ChatMistralAI } from '@langchain/mistralai' import { ChatAnthropic } from '@langchain/anthropic' @@ -17,7 +16,7 @@ import { IVisionChatModal, ConversationHistorySelection } from '../../src/Interface' -import { availableDependencies, defaultAllowBuiltInDep, getVars, prepareSandboxVars } from '../../src/utils' +import { getVars, executeJavaScriptCode, createCodeExecutionSandbox } from '../../src/utils' import { ChatPromptTemplate, BaseMessagePromptTemplateLike } from '@langchain/core/prompts' export const checkCondition = (input: string | number | undefined, condition: string, value: string | number = ''): boolean => { @@ -150,40 +149,6 @@ export const processImageMessage = async (llm: BaseChatModel, nodeData: INodeDat return multiModalMessageContent } -export const getVM = async (appDataSource: DataSource, databaseEntities: IDatabaseEntity, nodeData: INodeData, flow: ICommonObject) => { - const variables = await getVars(appDataSource, databaseEntities, nodeData) - - let sandbox: any = { - util: undefined, - Symbol: undefined, - child_process: undefined, - fs: undefined, - process: undefined - } - sandbox['$vars'] = prepareSandboxVars(variables) - sandbox['$flow'] = flow - - const builtinDeps = process.env.TOOL_FUNCTION_BUILTIN_DEP - ? defaultAllowBuiltInDep.concat(process.env.TOOL_FUNCTION_BUILTIN_DEP.split(',')) - : defaultAllowBuiltInDep - const externalDeps = process.env.TOOL_FUNCTION_EXTERNAL_DEP ? 
process.env.TOOL_FUNCTION_EXTERNAL_DEP.split(',') : [] - const deps = availableDependencies.concat(externalDeps) - - const nodeVMOptions = { - console: 'inherit', - sandbox, - require: { - external: { modules: deps }, - builtin: builtinDeps - }, - eval: false, - wasm: false, - timeout: 10000 - } as any - - return new NodeVM(nodeVMOptions) -} - export const customGet = (obj: any, path: string) => { if (path.includes('[-1]')) { const parts = path.split('.') @@ -273,7 +238,7 @@ export function filterConversationHistory( export const restructureMessages = (llm: BaseChatModel, state: ISeqAgentsState) => { const messages: BaseMessage[] = [] for (const message of state.messages as unknown as BaseMessage[]) { - // Sometimes Anthropic can return a message with content types of array, ignore that EXECEPT when tool calls are present + // Sometimes Anthropic can return a message with content types of array, ignore that EXCEPT when tool calls are present if ((message as any).tool_calls?.length && message.content !== '') { message.content = JSON.stringify(message.content) } @@ -420,9 +385,19 @@ export const checkMessageHistory = async ( if (messageHistory) { const appDataSource = options.appDataSource as DataSource const databaseEntities = options.databaseEntities as IDatabaseEntity - const vm = await getVM(appDataSource, databaseEntities, nodeData, {}) + + const variables = await getVars(appDataSource, databaseEntities, nodeData, options) + const flow = { + chatflowId: options.chatflowid, + sessionId: options.sessionId, + chatId: options.chatId + } + + const sandbox = createCodeExecutionSandbox('', variables, flow) + try { - const response = await vm.run(`module.exports = async function() {${messageHistory}}()`, __dirname) + const response = await executeJavaScriptCode(messageHistory, sandbox) + if (!Array.isArray(response)) throw new Error('Returned message history must be an array') if (sysPrompt) { // insert at index 1 diff --git 
a/packages/components/nodes/textsplitters/MarkdownTextSplitter/MarkdownTextSplitter.ts b/packages/components/nodes/textsplitters/MarkdownTextSplitter/MarkdownTextSplitter.ts index 82d9f1e22..4170972ad 100644 --- a/packages/components/nodes/textsplitters/MarkdownTextSplitter/MarkdownTextSplitter.ts +++ b/packages/components/nodes/textsplitters/MarkdownTextSplitter/MarkdownTextSplitter.ts @@ -16,7 +16,7 @@ class MarkdownTextSplitter_TextSplitters implements INode { constructor() { this.label = 'Markdown Text Splitter' this.name = 'markdownTextSplitter' - this.version = 1.0 + this.version = 1.1 this.type = 'MarkdownTextSplitter' this.icon = 'markdownTextSplitter.svg' this.category = 'Text Splitters' @@ -38,6 +38,44 @@ class MarkdownTextSplitter_TextSplitters implements INode { description: 'Number of characters to overlap between chunks. Default is 200.', default: 200, optional: true + }, + { + label: 'Split by Headers', + name: 'splitByHeaders', + type: 'options', + description: 'Split documents at specified header levels. 
Headers will be included with their content.', + default: 'disabled', + options: [ + { + label: 'Disabled', + name: 'disabled' + }, + { + label: '# Headers (H1)', + name: 'h1' + }, + { + label: '## Headers (H2)', + name: 'h2' + }, + { + label: '### Headers (H3)', + name: 'h3' + }, + { + label: '#### Headers (H4)', + name: 'h4' + }, + { + label: '##### Headers (H5)', + name: 'h5' + }, + { + label: '###### Headers (H6)', + name: 'h6' + } + ], + optional: true } ] } @@ -45,6 +83,7 @@ class MarkdownTextSplitter_TextSplitters implements INode { async init(nodeData: INodeData): Promise { const chunkSize = nodeData.inputs?.chunkSize as string const chunkOverlap = nodeData.inputs?.chunkOverlap as string + const splitByHeaders = nodeData.inputs?.splitByHeaders as string const obj = {} as MarkdownTextSplitterParams @@ -53,8 +92,83 @@ class MarkdownTextSplitter_TextSplitters implements INode { const splitter = new MarkdownTextSplitter(obj) + if (splitByHeaders && splitByHeaders !== 'disabled') { + return { + splitDocuments: async (documents: any[]) => { + const results = [] + + for (const doc of documents) { + const chunks = await this.splitByHeaders(doc.pageContent, splitByHeaders, splitter) + for (const chunk of chunks) { + results.push({ + pageContent: chunk, + metadata: { ...doc.metadata } + }) + } + } + + return results + }, + splitText: async (text: string) => { + return await this.splitByHeaders(text, splitByHeaders, splitter) + } + } + } + return splitter } + + private async splitByHeaders(text: string, headerLevel: string, fallbackSplitter: any): Promise { + const maxLevel = this.getHeaderLevel(headerLevel) + if (maxLevel === 0) return await fallbackSplitter.splitText(text) + + const lines = text.split('\n') + const sections: string[] = [] + let currentSection: string[] = [] + + for (const line of lines) { + const isHeader = line.startsWith('#') && line.match(/^#{1,6}\s/) + const headerDepth = isHeader ? 
line.match(/^(#+)/)?.[1]?.length || 0 : 0 + + if (isHeader && headerDepth <= maxLevel) { + // Save previous section + if (currentSection.length > 0) { + sections.push(currentSection.join('\n').trim()) + } + // Start new section + currentSection = [line] + } else { + // Add line to current section + currentSection.push(line) + } + } + + // Add final section + if (currentSection.length > 0) { + sections.push(currentSection.join('\n').trim()) + } + + return sections + } + + private getHeaderLevel(headerLevel: string): number { + switch (headerLevel) { + case 'h1': + return 1 + case 'h2': + return 2 + case 'h3': + return 3 + case 'h4': + return 4 + case 'h5': + return 5 + case 'h6': + return 6 + default: + return 0 + } + } } module.exports = { nodeClass: MarkdownTextSplitter_TextSplitters } diff --git a/packages/components/nodes/tools/AWSDynamoDBKVStorage/AWSDynamoDBKVStorage.test.ts b/packages/components/nodes/tools/AWSDynamoDBKVStorage/AWSDynamoDBKVStorage.test.ts new file mode 100644 index 000000000..c69b48aeb --- /dev/null +++ b/packages/components/nodes/tools/AWSDynamoDBKVStorage/AWSDynamoDBKVStorage.test.ts @@ -0,0 +1,479 @@ +// Mock AWS SDK DynamoDB client +jest.mock('@aws-sdk/client-dynamodb', () => { + const mockSend = jest.fn() + + // Create mock constructors that capture inputs + const PutItemCommandMock = jest.fn((input) => ({ input, _type: 'PutItemCommand' })) + const QueryCommandMock = jest.fn((input) => ({ input, _type: 'QueryCommand' })) + + return { + DynamoDBClient: jest.fn().mockImplementation(() => ({ + send: mockSend + })), + DescribeTableCommand: jest.fn(), + ListTablesCommand: jest.fn(), + PutItemCommand: PutItemCommandMock, + QueryCommand: QueryCommandMock, + __mockSend: mockSend + } +}) + +// Mock AWS credentials utility +jest.mock('../../../src/awsToolsUtils', () => ({ + AWS_REGIONS: [ + { label: 'US East (N. 
Virginia)', name: 'us-east-1' }, + { label: 'US West (Oregon)', name: 'us-west-2' } + ], + DEFAULT_AWS_REGION: 'us-east-1', + getAWSCredentials: jest.fn(() => + Promise.resolve({ + accessKeyId: 'test-access-key', + secretAccessKey: 'test-secret-key', + sessionToken: 'test-session-token' + }) + ) +})) + +// Mock getBaseClasses function +jest.mock('../../../src/utils', () => ({ + getBaseClasses: jest.fn(() => ['Tool', 'StructuredTool']) +})) + +describe('AWSDynamoDBKVStorage', () => { + let AWSDynamoDBKVStorage_Tools: any + let mockSend: jest.Mock + let PutItemCommandMock: jest.Mock + let QueryCommandMock: jest.Mock + + // Helper function to create a node instance + const createNode = () => new AWSDynamoDBKVStorage_Tools() + + // Helper function to create nodeData + const createNodeData = (overrides = {}) => ({ + inputs: { + region: 'us-east-1', + tableName: 'test-table', + keyPrefix: '', + operation: 'store', + ...overrides + } + }) + + beforeEach(async () => { + // Clear all mocks before each test + jest.clearAllMocks() + + // Get the mock functions + const dynamoDBModule = require('@aws-sdk/client-dynamodb') + mockSend = dynamoDBModule.__mockSend + PutItemCommandMock = dynamoDBModule.PutItemCommand + QueryCommandMock = dynamoDBModule.QueryCommand + + mockSend.mockReset() + PutItemCommandMock.mockClear() + QueryCommandMock.mockClear() + + // Dynamic import to get fresh module instance + const module = (await import('./AWSDynamoDBKVStorage')) as any + AWSDynamoDBKVStorage_Tools = module.nodeClass + }) + + describe('AWSDynamoDBKVStorage_Tools Node', () => { + it('should have correct input parameters', () => { + const node = createNode() + const inputNames = node.inputs.map((input: any) => input.name) + + expect(inputNames).toEqual(['region', 'tableName', 'keyPrefix', 'operation']) + }) + }) + + describe('loadMethods - listTables', () => { + it('should list valid DynamoDB tables with correct schema', async () => { + const node = createNode() + + // Mock responses for 
list and describe commands + mockSend + .mockResolvedValueOnce({ + TableNames: ['table1', 'table2', 'invalid-table'] + }) + .mockResolvedValueOnce({ + Table: { + KeySchema: [ + { AttributeName: 'pk', KeyType: 'HASH' }, + { AttributeName: 'sk', KeyType: 'RANGE' } + ] + } + }) + .mockResolvedValueOnce({ + Table: { + KeySchema: [ + { AttributeName: 'pk', KeyType: 'HASH' }, + { AttributeName: 'sk', KeyType: 'RANGE' } + ] + } + }) + .mockResolvedValueOnce({ + Table: { + KeySchema: [{ AttributeName: 'id', KeyType: 'HASH' }] + } + }) + + const nodeData = { inputs: { region: 'us-east-1' } } + + const result = await node.loadMethods.listTables(nodeData, {}) + + expect(result).toEqual([ + { + label: 'table1', + name: 'table1', + description: 'Table with pk (partition) and sk (sort) keys' + }, + { + label: 'table2', + name: 'table2', + description: 'Table with pk (partition) and sk (sort) keys' + } + ]) + }) + + it('should return error when no tables found', async () => { + const node = createNode() + + mockSend.mockResolvedValueOnce({ + TableNames: [] + }) + + const nodeData = { inputs: { region: 'us-east-1' } } + + const result = await node.loadMethods.listTables(nodeData, {}) + + expect(result).toEqual([ + { + label: 'No tables found', + name: 'error', + description: 'No DynamoDB tables found in this region' + } + ]) + }) + + it('should return error when no compatible tables found', async () => { + const node = createNode() + + mockSend + .mockResolvedValueOnce({ + TableNames: ['invalid-table'] + }) + .mockResolvedValueOnce({ + Table: { + KeySchema: [{ AttributeName: 'id', KeyType: 'HASH' }] + } + }) + + const nodeData = { inputs: { region: 'us-east-1' } } + + const result = await node.loadMethods.listTables(nodeData, {}) + + expect(result).toHaveLength(1) + expect(result[0]).toMatchObject({ + label: 'No compatible tables found', + name: 'error' + }) + expect(result[0].description).toContain('Found 1 table(s) with different schema') + }) + + it('should handle AWS 
credentials error', async () => { + const node = createNode() + const { getAWSCredentials } = require('../../../src/awsToolsUtils') + + getAWSCredentials.mockRejectedValueOnce(new Error('AWS Access Key not found')) + + const nodeData = { inputs: { region: 'us-east-1' } } + + const result = await node.loadMethods.listTables(nodeData, {}) + + expect(result).toEqual([ + { + label: 'AWS Credentials Required', + name: 'error', + description: 'Enter AWS Access Key ID and Secret Access Key' + } + ]) + }) + }) + + describe('init method', () => { + it.each([ + ['store', 'test-prefix', 'dynamodb_kv_store', 'Store a text value with a key in DynamoDB'], + ['retrieve', '', 'dynamodb_kv_retrieve', 'Retrieve a value by key from DynamoDB'] + ])('should create correct tool for %s operation', async (operation, keyPrefix, expectedName, expectedDescription) => { + const node = createNode() + const nodeData = createNodeData({ keyPrefix, operation }) + + const tool = await node.init(nodeData, '', {}) + + expect(tool.name).toBe(expectedName) + expect(tool.description).toContain(expectedDescription) + }) + + it.each([ + ['error', '', 'Valid DynamoDB Table selection is required'], + ['test-table', 'prefix#invalid', 'Key prefix cannot contain "#" character'] + ])('should throw error for invalid config (table: %s, prefix: %s)', async (tableName, keyPrefix, expectedError) => { + const node = createNode() + const nodeData = createNodeData({ tableName, keyPrefix }) + + await expect(node.init(nodeData, '', {})).rejects.toThrow(expectedError) + }) + }) + + describe('DynamoDBStoreTool', () => { + it('should store value successfully', async () => { + const node = createNode() + + mockSend.mockResolvedValueOnce({}) + + const nodeData = createNodeData({ keyPrefix: 'test' }) + + const tool = await node.init(nodeData, '', {}) + const result = await tool._call({ key: 'mykey', value: 'myvalue' }) + + expect(result).toContain('Successfully stored value with key "mykey"') + 
expect(mockSend).toHaveBeenCalledTimes(1) + + // Verify PutItemCommand was called with correct parameters + expect(PutItemCommandMock).toHaveBeenCalledTimes(1) + const putCommandInput = PutItemCommandMock.mock.calls[0][0] + + expect(putCommandInput).toMatchObject({ + TableName: 'test-table', + Item: { + pk: { S: 'test#mykey' }, + value: { S: 'myvalue' } + } + }) + + // Verify timestamp fields exist + expect(putCommandInput.Item.sk).toBeDefined() + expect(putCommandInput.Item.timestamp).toBeDefined() + }) + + it.each([ + ['', 'Key must be a non-empty string'], + [' ', 'Key must be a non-empty string'], + ['a'.repeat(2049), 'Key too long'] + ])('should handle invalid key: "%s"', async (key, expectedError) => { + const node = createNode() + + const nodeData = createNodeData() + + const tool = await node.init(nodeData, '', {}) + await expect(tool._call({ key, value: 'myvalue' })).rejects.toThrow(expectedError) + }) + + it.each([ + ['store', { key: 'mykey', value: 'myvalue' }, 'Failed to store value: DynamoDB error'], + ['retrieve', { key: 'mykey' }, 'Failed to retrieve value: DynamoDB error'] + ])('should handle DynamoDB error for %s', async (operation, callParams, expectedError) => { + const node = createNode() + mockSend.mockRejectedValueOnce(new Error('DynamoDB error')) + + const nodeData = createNodeData({ operation }) + const tool = await node.init(nodeData, '', {}) + + await expect(tool._call(callParams)).rejects.toThrow(expectedError) + }) + }) + + describe('DynamoDBRetrieveTool', () => { + it('should retrieve latest value successfully', async () => { + const node = createNode() + + mockSend.mockResolvedValueOnce({ + Items: [ + { + pk: { S: 'test#mykey' }, + sk: { S: '1234567890' }, + value: { S: 'myvalue' }, + timestamp: { S: '2024-01-01T00:00:00.000Z' } + } + ] + }) + + const nodeData = createNodeData({ keyPrefix: 'test', operation: 'retrieve' }) + + const tool = await node.init(nodeData, '', {}) + const result = await tool._call({ key: 'mykey' }) + const 
parsed = JSON.parse(result) + + expect(parsed).toEqual({ + value: 'myvalue', + timestamp: '2024-01-01T00:00:00.000Z' + }) + expect(mockSend).toHaveBeenCalledTimes(1) + + // Verify QueryCommand was called with correct parameters + expect(QueryCommandMock).toHaveBeenCalledTimes(1) + const queryCommandInput = QueryCommandMock.mock.calls[0][0] + + expect(queryCommandInput).toMatchObject({ + TableName: 'test-table', + KeyConditionExpression: 'pk = :pk', + ExpressionAttributeValues: { + ':pk': { S: 'test#mykey' } + }, + ScanIndexForward: false, + Limit: 1 + }) + }) + + it('should retrieve nth latest value', async () => { + const node = createNode() + + mockSend.mockResolvedValueOnce({ + Items: [ + { + pk: { S: 'mykey' }, + sk: { S: '1234567892' }, + value: { S: 'newest' }, + timestamp: { S: '2024-01-03T00:00:00.000Z' } + }, + { + pk: { S: 'mykey' }, + sk: { S: '1234567891' }, + value: { S: 'second' }, + timestamp: { S: '2024-01-02T00:00:00.000Z' } + }, + { + pk: { S: 'mykey' }, + sk: { S: '1234567890' }, + value: { S: 'oldest' }, + timestamp: { S: '2024-01-01T00:00:00.000Z' } + } + ] + }) + + const nodeData = createNodeData({ operation: 'retrieve' }) + + const tool = await node.init(nodeData, '', {}) + const result = await tool._call({ key: 'mykey', nthLatest: '2' }) + const parsed = JSON.parse(result) + + expect(parsed).toEqual({ + value: 'second', + timestamp: '2024-01-02T00:00:00.000Z' + }) + + // Verify QueryCommand was called with Limit: 2 + expect(QueryCommandMock).toHaveBeenCalledTimes(1) + const queryCommandInput = QueryCommandMock.mock.calls[0][0] + expect(queryCommandInput.Limit).toBe(2) + }) + + it('should return null when key not found', async () => { + const node = createNode() + + mockSend.mockResolvedValueOnce({ + Items: [] + }) + + const nodeData = createNodeData({ operation: 'retrieve' }) + + const tool = await node.init(nodeData, '', {}) + const result = await tool._call({ key: 'nonexistent' }) + const parsed = JSON.parse(result) + + 
expect(parsed).toEqual({ + value: null, + timestamp: null + }) + }) + + it('should return null when nth version does not exist', async () => { + const node = createNode() + + mockSend.mockResolvedValueOnce({ + Items: [ + { + pk: { S: 'mykey' }, + sk: { S: '1234567890' }, + value: { S: 'only-one' }, + timestamp: { S: '2024-01-01T00:00:00.000Z' } + } + ] + }) + + const nodeData = createNodeData({ operation: 'retrieve' }) + + const tool = await node.init(nodeData, '', {}) + const result = await tool._call({ key: 'mykey', nthLatest: '3' }) + const parsed = JSON.parse(result) + + expect(parsed).toEqual({ + value: null, + timestamp: null + }) + }) + + it.each([ + ['0', 'nthLatest must be a positive number'], + ['-1', 'nthLatest must be a positive number'] + ])('should reject invalid nthLatest value "%s"', async (nthLatest, expectedError) => { + const node = createNode() + + const nodeData = createNodeData({ operation: 'retrieve' }) + + const tool = await node.init(nodeData, '', {}) + await expect(tool._call({ key: 'mykey', nthLatest })).rejects.toThrow(expectedError) + }) + + it.each([ + ['', 'Key must be a non-empty string'], + [' ', 'Key must be a non-empty string'] + ])('should handle invalid key for retrieve: "%s"', async (key, expectedError) => { + const node = createNode() + + const nodeData = createNodeData({ operation: 'retrieve' }) + + const tool = await node.init(nodeData, '', {}) + await expect(tool._call({ key })).rejects.toThrow(expectedError) + }) + }) + + describe('Helper Functions', () => { + it.each([ + ['myapp', 'userdata', 'myapp#userdata'], + ['', 'userdata', 'userdata'] + ])('should build full key correctly (prefix: "%s", key: "%s", expected: "%s")', async (keyPrefix, key, expectedFullKey) => { + const node = createNode() + mockSend.mockResolvedValueOnce({}) + const nodeData = createNodeData({ keyPrefix }) + + const tool = await node.init(nodeData, '', {}) + await tool._call({ key, value: 'test' }) + + // Verify the put command was called with the 
correct full key + expect(mockSend).toHaveBeenCalledTimes(1) + expect(PutItemCommandMock).toHaveBeenCalledTimes(1) + + const putCommandInput = PutItemCommandMock.mock.calls[0][0] + expect(putCommandInput.Item.pk.S).toBe(expectedFullKey) + }) + + it.each([ + [{ accessKeyId: 'test-key', secretAccessKey: 'test-secret', sessionToken: 'test-token' }, 'with session token'], + [{ accessKeyId: 'test-key', secretAccessKey: 'test-secret' }, 'without session token'] + ])('should work %s', async (credentials, _description) => { + const node = createNode() + const { getAWSCredentials } = require('../../../src/awsToolsUtils') + + getAWSCredentials.mockResolvedValueOnce(credentials) + mockSend.mockResolvedValueOnce({}) + + const nodeData = createNodeData() + + const tool = await node.init(nodeData, '', {}) + await tool._call({ key: 'test', value: 'value' }) + expect(getAWSCredentials).toHaveBeenCalled() + }) + }) +}) diff --git a/packages/components/nodes/tools/AWSDynamoDBKVStorage/AWSDynamoDBKVStorage.ts b/packages/components/nodes/tools/AWSDynamoDBKVStorage/AWSDynamoDBKVStorage.ts new file mode 100644 index 000000000..6d1141592 --- /dev/null +++ b/packages/components/nodes/tools/AWSDynamoDBKVStorage/AWSDynamoDBKVStorage.ts @@ -0,0 +1,375 @@ +import { z } from 'zod' +import { StructuredTool } from '@langchain/core/tools' +import { ICommonObject, INode, INodeData, INodeOptionsValue, INodeParams } from '../../../src/Interface' +import { getBaseClasses } from '../../../src/utils' +import { AWS_REGIONS, DEFAULT_AWS_REGION, AWSCredentials, getAWSCredentials } from '../../../src/awsToolsUtils' +import { DynamoDBClient, DescribeTableCommand, ListTablesCommand, PutItemCommand, QueryCommand } from '@aws-sdk/client-dynamodb' + +// Operation enum +enum Operation { + STORE = 'store', + RETRIEVE = 'retrieve' +} + +// Constants +const ERROR_PLACEHOLDER = 'error' +const KEY_SEPARATOR = '#' +const MAX_KEY_LENGTH = 2048 // DynamoDB limit for partition key + +// Helper function to create DynamoDB 
client +function createDynamoDBClient(credentials: AWSCredentials, region: string): DynamoDBClient { + return new DynamoDBClient({ + region, + credentials: { + accessKeyId: credentials.accessKeyId, + secretAccessKey: credentials.secretAccessKey, + ...(credentials.sessionToken && { sessionToken: credentials.sessionToken }) + } + }) +} + +// Helper function to build full key with optional prefix +function buildFullKey(key: string, keyPrefix: string): string { + const fullKey = keyPrefix ? `${keyPrefix}${KEY_SEPARATOR}${key}` : key + + // Validate key length (DynamoDB limit) + if (fullKey.length > MAX_KEY_LENGTH) { + throw new Error(`Key too long. Maximum length is ${MAX_KEY_LENGTH} characters, got ${fullKey.length}`) + } + + return fullKey +} + +// Helper function to validate and sanitize input +function validateKey(key: string): void { + if (!key || key.trim().length === 0) { + throw new Error('Key must be a non-empty string') + } +} + +/** + * Tool for storing key-value pairs in DynamoDB with automatic versioning + */ +class DynamoDBStoreTool extends StructuredTool { + name = 'dynamodb_kv_store' + description = 'Store a text value with a key in DynamoDB. Input must be an object with "key" and "value" properties.' 
+ schema = z.object({ + key: z.string().min(1).describe('The key to store the value under'), + value: z.string().describe('The text value to store') + }) + private readonly dynamoClient: DynamoDBClient + private readonly tableName: string + private readonly keyPrefix: string + + constructor(dynamoClient: DynamoDBClient, tableName: string, keyPrefix: string = '') { + super() + this.dynamoClient = dynamoClient + this.tableName = tableName + this.keyPrefix = keyPrefix + } + + async _call({ key, value }: z.infer): Promise { + try { + validateKey(key) + const fullKey = buildFullKey(key, this.keyPrefix) + const timestamp = Date.now() + const isoTimestamp = new Date(timestamp).toISOString() + + const putCommand = new PutItemCommand({ + TableName: this.tableName, + Item: { + pk: { S: fullKey }, + sk: { S: timestamp.toString() }, + value: { S: value }, + timestamp: { S: isoTimestamp } + } + }) + + await this.dynamoClient.send(putCommand) + return `Successfully stored value with key "${key}" at ${isoTimestamp}` + } catch (error) { + const errorMessage = error instanceof Error ? error.message : String(error) + throw new Error(`Failed to store value: ${errorMessage}`) + } + } +} + +/** + * Tool for retrieving key-value pairs from DynamoDB with version control + */ +class DynamoDBRetrieveTool extends StructuredTool { + name = 'dynamodb_kv_retrieve' + description = + 'Retrieve a value by key from DynamoDB. Returns JSON with value and timestamp. Specify which version to get (1=latest, 2=2nd latest, etc).' + schema = z.object({ + key: z.string().min(1).describe('The key to retrieve the value for'), + nthLatest: z + .string() + .regex(/^\d+$/, 'Must be a positive number') + .describe( + 'Which version to retrieve: "1" for latest, "2" for 2nd latest, "3" for 3rd latest, etc. Use "1" to get the most recent value.' 
+ ) + .optional() + .default('1') + }) + private readonly dynamoClient: DynamoDBClient + private readonly tableName: string + private readonly keyPrefix: string + + constructor(dynamoClient: DynamoDBClient, tableName: string, keyPrefix: string = '') { + super() + this.dynamoClient = dynamoClient + this.tableName = tableName + this.keyPrefix = keyPrefix + } + + async _call(input: z.infer): Promise { + try { + const { key, nthLatest = '1' } = input + validateKey(key) + const fullKey = buildFullKey(key, this.keyPrefix) + + // Convert string to number and validate + const nthLatestNum = parseInt(nthLatest, 10) + if (isNaN(nthLatestNum) || nthLatestNum < 1) { + throw new Error('nthLatest must be a positive number (1 or greater)') + } + + const queryCommand = new QueryCommand({ + TableName: this.tableName, + KeyConditionExpression: 'pk = :pk', + ExpressionAttributeValues: { + ':pk': { S: fullKey } + }, + ScanIndexForward: false, // Sort descending (newest first) + Limit: nthLatestNum + }) + + const result = await this.dynamoClient.send(queryCommand) + + if (!result.Items || result.Items.length === 0) { + return JSON.stringify({ + value: null, + timestamp: null + }) + } + + if (result.Items.length < nthLatestNum) { + return JSON.stringify({ + value: null, + timestamp: null + }) + } + + const item = result.Items[nthLatestNum - 1] + const value = item.value?.S || null + const timestamp = item.timestamp?.S || item.sk?.S || null + + // Return JSON with value and timestamp + return JSON.stringify({ + value: value, + timestamp: timestamp + }) + } catch (error) { + const errorMessage = error instanceof Error ? 
error.message : String(error) + throw new Error(`Failed to retrieve value: ${errorMessage}`) + } + } +} + +/** + * Node implementation for AWS DynamoDB KV Storage tools + */ +class AWSDynamoDBKVStorage_Tools implements INode { + label: string + name: string + version: number + type: string + icon: string + category: string + description: string + baseClasses: string[] + credential: INodeParams + inputs: INodeParams[] + + constructor() { + this.label = 'AWS DynamoDB KV Storage' + this.name = 'awsDynamoDBKVStorage' + this.version = 1.0 + this.type = 'AWSDynamoDBKVStorage' + this.icon = 'dynamodbkvstorage.svg' + this.category = 'Tools' + this.description = 'Store and retrieve versioned text values in AWS DynamoDB' + this.baseClasses = [this.type, ...getBaseClasses(DynamoDBStoreTool)] + this.credential = { + label: 'AWS Credentials', + name: 'credential', + type: 'credential', + credentialNames: ['awsApi'] + } + this.inputs = [ + { + label: 'AWS Region', + name: 'region', + type: 'options', + options: AWS_REGIONS, + default: DEFAULT_AWS_REGION, + description: 'AWS Region where your DynamoDB tables are located' + }, + { + label: 'DynamoDB Table', + name: 'tableName', + type: 'asyncOptions', + loadMethod: 'listTables', + description: 'Select a DynamoDB table with partition key "pk" and sort key "sk"', + refresh: true + }, + { + label: 'Key Prefix', + name: 'keyPrefix', + type: 'string', + description: 'Optional prefix to add to all keys (e.g., "myapp" would make keys like "myapp#userdata")', + optional: true, + additionalParams: true + }, + { + label: 'Operation', + name: 'operation', + type: 'options', + options: [ + { label: 'Store', name: Operation.STORE }, + { label: 'Retrieve', name: Operation.RETRIEVE } + ], + default: Operation.STORE, + description: 'Choose whether to store or retrieve data' + } + ] + } + + loadMethods: Record Promise> = { + listTables: async (nodeData: INodeData, options?: ICommonObject): Promise => { + try { + const credentials = await 
getAWSCredentials(nodeData, options ?? {}) + const region = (nodeData.inputs?.region as string) || DEFAULT_AWS_REGION + const dynamoClient = createDynamoDBClient(credentials, region) + + const listCommand = new ListTablesCommand({}) + const listResponse = await dynamoClient.send(listCommand) + + if (!listResponse.TableNames || listResponse.TableNames.length === 0) { + return [ + { + label: 'No tables found', + name: ERROR_PLACEHOLDER, + description: 'No DynamoDB tables found in this region' + } + ] + } + + const validTables: INodeOptionsValue[] = [] + const invalidTables: string[] = [] + + // Check tables in parallel for better performance + const tableChecks = await Promise.allSettled( + listResponse.TableNames.map(async (tableName) => { + const describeCommand = new DescribeTableCommand({ + TableName: tableName + }) + const describeResponse = await dynamoClient.send(describeCommand) + + const keySchema = describeResponse.Table?.KeySchema + if (keySchema) { + const hasPk = keySchema.some((key) => key.AttributeName === 'pk' && key.KeyType === 'HASH') + const hasSk = keySchema.some((key) => key.AttributeName === 'sk' && key.KeyType === 'RANGE') + + if (hasPk && hasSk) { + return { + valid: true, + table: { + label: tableName, + name: tableName, + description: `Table with pk (partition) and sk (sort) keys` + } + } + } + } + return { valid: false, tableName } + }) + ) + + tableChecks.forEach((result) => { + if (result.status === 'fulfilled') { + if (result.value.valid) { + validTables.push(result.value.table!) + } else if (result.value.tableName) { + invalidTables.push(result.value.tableName) + } + } + }) + + if (validTables.length === 0) { + return [ + { + label: 'No compatible tables found', + name: ERROR_PLACEHOLDER, + description: `No tables with partition key "pk" and sort key "sk" found. ${ + invalidTables.length > 0 ? 
`Found ${invalidTables.length} table(s) with different schema.` : '' + } Please create a table with these keys.` + } + ] + } + + // Sort tables alphabetically + validTables.sort((a, b) => a.label.localeCompare(b.label)) + + return validTables + } catch (error) { + if (error instanceof Error && error.message.includes('AWS Access Key')) { + return [ + { + label: 'AWS Credentials Required', + name: ERROR_PLACEHOLDER, + description: 'Enter AWS Access Key ID and Secret Access Key' + } + ] + } + console.error('Error loading DynamoDB tables:', error) + return [ + { + label: 'Error Loading Tables', + name: ERROR_PLACEHOLDER, + description: `Failed to load tables: ${error instanceof Error ? error.message : String(error)}` + } + ] + } + } + } + + async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { + const credentials = await getAWSCredentials(nodeData, options) + + const region = (nodeData.inputs?.region as string) || DEFAULT_AWS_REGION + const tableName = nodeData.inputs?.tableName as string + const keyPrefix = (nodeData.inputs?.keyPrefix as string) || '' + const operation = (nodeData.inputs?.operation as string) || Operation.STORE + + if (!tableName || tableName === ERROR_PLACEHOLDER) { + throw new Error('Valid DynamoDB Table selection is required') + } + + // Validate key prefix doesn't contain separator + if (keyPrefix && keyPrefix.includes(KEY_SEPARATOR)) { + throw new Error(`Key prefix cannot contain "${KEY_SEPARATOR}" character`) + } + + const dynamoClient = createDynamoDBClient(credentials, region) + + if (operation === Operation.STORE) { + return new DynamoDBStoreTool(dynamoClient, tableName, keyPrefix) + } else { + return new DynamoDBRetrieveTool(dynamoClient, tableName, keyPrefix) + } + } +} + +module.exports = { nodeClass: AWSDynamoDBKVStorage_Tools } diff --git a/packages/components/nodes/tools/AWSDynamoDBKVStorage/dynamodbkvstorage.svg b/packages/components/nodes/tools/AWSDynamoDBKVStorage/dynamodbkvstorage.svg new file mode 100644 
index 000000000..3912d7a8f --- /dev/null +++ b/packages/components/nodes/tools/AWSDynamoDBKVStorage/dynamodbkvstorage.svg @@ -0,0 +1,29 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + K:V + \ No newline at end of file diff --git a/packages/components/nodes/tools/AWSSNS/AWSSNS.ts b/packages/components/nodes/tools/AWSSNS/AWSSNS.ts new file mode 100644 index 000000000..8dc09d8ee --- /dev/null +++ b/packages/components/nodes/tools/AWSSNS/AWSSNS.ts @@ -0,0 +1,146 @@ +import { Tool } from '@langchain/core/tools' +import { ICommonObject, INode, INodeData, INodeOptionsValue, INodeParams } from '../../../src/Interface' +import { getBaseClasses } from '../../../src/utils' +import { AWS_REGIONS, DEFAULT_AWS_REGION, getAWSCredentials } from '../../../src/awsToolsUtils' +import { SNSClient, ListTopicsCommand, PublishCommand } from '@aws-sdk/client-sns' + +class AWSSNSTool extends Tool { + name = 'aws_sns_publish' + description = 'Publishes a message to an AWS SNS topic' + private snsClient: SNSClient + private topicArn: string + + constructor(snsClient: SNSClient, topicArn: string) { + super() + this.snsClient = snsClient + this.topicArn = topicArn + } + + async _call(message: string): Promise { + try { + const command = new PublishCommand({ + TopicArn: this.topicArn, + Message: message + }) + + const response = await this.snsClient.send(command) + return `Successfully published message to SNS topic. 
MessageId: ${response.MessageId}` + } catch (error) { + return `Failed to publish message to SNS: ${error}` + } + } +} + +class AWSSNS_Tools implements INode { + label: string + name: string + version: number + type: string + icon: string + category: string + description: string + baseClasses: string[] + credential: INodeParams + inputs: INodeParams[] + + constructor() { + this.label = 'AWS SNS' + this.name = 'awsSNS' + this.version = 1.0 + this.type = 'AWSSNS' + this.icon = 'awssns.svg' + this.category = 'Tools' + this.description = 'Publish messages to AWS SNS topics' + this.baseClasses = [this.type, ...getBaseClasses(AWSSNSTool)] + this.credential = { + label: 'AWS Credentials', + name: 'credential', + type: 'credential', + credentialNames: ['awsApi'] + } + this.inputs = [ + { + label: 'AWS Region', + name: 'region', + type: 'options', + options: AWS_REGIONS, + default: DEFAULT_AWS_REGION, + description: 'AWS Region where your SNS topics are located' + }, + { + label: 'SNS Topic', + name: 'topicArn', + type: 'asyncOptions', + loadMethod: 'listTopics', + description: 'Select the SNS topic to publish to', + refresh: true + } + ] + } + + //@ts-ignore + loadMethods = { + listTopics: async (nodeData: INodeData, options?: ICommonObject): Promise => { + try { + const credentials = await getAWSCredentials(nodeData, options ?? 
{}) + const region = (nodeData.inputs?.region as string) || DEFAULT_AWS_REGION + + const snsClient = new SNSClient({ + region: region, + credentials: credentials + }) + + const command = new ListTopicsCommand({}) + const response = await snsClient.send(command) + + if (!response.Topics || response.Topics.length === 0) { + return [ + { + label: 'No topics found', + name: 'placeholder', + description: 'No SNS topics found in this region' + } + ] + } + + return response.Topics.map((topic) => { + const topicArn = topic.TopicArn || '' + const topicName = topicArn.split(':').pop() || topicArn + return { + label: topicName, + name: topicArn, + description: topicArn + } + }) + } catch (error) { + console.error('Error loading SNS topics:', error) + return [ + { + label: 'AWS Credentials Required', + name: 'placeholder', + description: 'Enter AWS Access Key ID and Secret Access Key' + } + ] + } + } + } + + async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { + const credentials = await getAWSCredentials(nodeData, options) + const region = (nodeData.inputs?.region as string) || DEFAULT_AWS_REGION + const topicArn = nodeData.inputs?.topicArn as string + + if (!topicArn) { + throw new Error('SNS Topic ARN is required') + } + + const snsClient = new SNSClient({ + region: region, + credentials: credentials + }) + + return new AWSSNSTool(snsClient, topicArn) + } +} + +module.exports = { nodeClass: AWSSNS_Tools } diff --git a/packages/components/nodes/tools/AWSSNS/awssns.svg b/packages/components/nodes/tools/AWSSNS/awssns.svg new file mode 100644 index 000000000..fe00ae2c6 --- /dev/null +++ b/packages/components/nodes/tools/AWSSNS/awssns.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/packages/components/nodes/tools/AgentAsTool/AgentAsTool.ts b/packages/components/nodes/tools/AgentAsTool/AgentAsTool.ts new file mode 100644 index 000000000..4fd9e7108 --- /dev/null +++ b/packages/components/nodes/tools/AgentAsTool/AgentAsTool.ts @@ -0,0 +1,384 @@ 
+import { DataSource } from 'typeorm' +import { z } from 'zod' +import { RunnableConfig } from '@langchain/core/runnables' +import { CallbackManagerForToolRun, Callbacks, CallbackManager, parseCallbackConfigArg } from '@langchain/core/callbacks/manager' +import { StructuredTool } from '@langchain/core/tools' +import { ICommonObject, IDatabaseEntity, INode, INodeData, INodeOptionsValue, INodeParams } from '../../../src/Interface' +import { + getCredentialData, + getCredentialParam, + executeJavaScriptCode, + createCodeExecutionSandbox, + parseWithTypeConversion +} from '../../../src/utils' +import { isValidUUID, isValidURL } from '../../../src/validator' +import { v4 as uuidv4 } from 'uuid' + +class AgentAsTool_Tools implements INode { + label: string + name: string + version: number + description: string + type: string + icon: string + category: string + baseClasses: string[] + credential: INodeParams + inputs: INodeParams[] + + constructor() { + this.label = 'Agent as Tool' + this.name = 'agentAsTool' + this.version = 1.0 + this.type = 'AgentAsTool' + this.icon = 'agentastool.svg' + this.category = 'Tools' + this.description = 'Use as a tool to execute another agentflow' + this.baseClasses = [this.type, 'Tool'] + this.credential = { + label: 'Connect Credential', + name: 'credential', + type: 'credential', + credentialNames: ['agentflowApi'], + optional: true + } + this.inputs = [ + { + label: 'Select Agent', + name: 'selectedAgentflow', + type: 'asyncOptions', + loadMethod: 'listAgentflows' + }, + { + label: 'Tool Name', + name: 'name', + type: 'string' + }, + { + label: 'Tool Description', + name: 'description', + type: 'string', + description: 'Description of what the tool does. This is for LLM to determine when to use this tool.', + rows: 3, + placeholder: + 'State of the Union QA - useful for when you need to ask questions about the most recent state of the union address.' 
+ }, + { + label: 'Return Direct', + name: 'returnDirect', + type: 'boolean', + optional: true + }, + { + label: 'Override Config', + name: 'overrideConfig', + description: 'Override the config passed to the Agentflow.', + type: 'json', + optional: true, + additionalParams: true, + acceptVariable: true + }, + { + label: 'Base URL', + name: 'baseURL', + type: 'string', + description: + 'Base URL to Flowise. By default, it is the URL of the incoming request. Useful when you need to execute the Agentflow through an alternative route.', + placeholder: 'http://localhost:3000', + optional: true, + additionalParams: true + }, + { + label: 'Start new session per message', + name: 'startNewSession', + type: 'boolean', + description: + 'Whether to continue the session with the Agentflow tool or start a new one with each interaction. Useful for Agentflows with memory if you want to avoid it.', + default: false, + optional: true, + additionalParams: true + }, + { + label: 'Use Question from Chat', + name: 'useQuestionFromChat', + type: 'boolean', + description: + 'Whether to use the question from the chat as input to the agentflow. If turned on, this will override the custom input.', + optional: true, + additionalParams: true + }, + { + label: 'Custom Input', + name: 'customInput', + type: 'string', + description: 'Custom input to be passed to the agentflow. 
Leave empty to let LLM decides the input.', + optional: true, + additionalParams: true, + show: { + useQuestionFromChat: false + } + } + ] + } + + //@ts-ignore + loadMethods = { + async listAgentflows(_: INodeData, options: ICommonObject): Promise { + const returnData: INodeOptionsValue[] = [] + + const appDataSource = options.appDataSource as DataSource + const databaseEntities = options.databaseEntities as IDatabaseEntity + if (appDataSource === undefined || !appDataSource) { + return returnData + } + + const searchOptions = options.searchOptions || {} + const agentflows = await appDataSource.getRepository(databaseEntities['ChatFlow']).findBy({ + ...searchOptions, + type: 'AGENTFLOW' + }) + + for (let i = 0; i < agentflows.length; i += 1) { + const data = { + label: agentflows[i].name, + name: agentflows[i].id + } as INodeOptionsValue + returnData.push(data) + } + return returnData + } + } + + async init(nodeData: INodeData, input: string, options: ICommonObject): Promise { + const selectedAgentflowId = nodeData.inputs?.selectedAgentflow as string + const _name = nodeData.inputs?.name as string + const description = nodeData.inputs?.description as string + const useQuestionFromChat = nodeData.inputs?.useQuestionFromChat as boolean + const returnDirect = nodeData.inputs?.returnDirect as boolean + const customInput = nodeData.inputs?.customInput as string + const overrideConfig = + typeof nodeData.inputs?.overrideConfig === 'string' && + nodeData.inputs.overrideConfig.startsWith('{') && + nodeData.inputs.overrideConfig.endsWith('}') + ? 
JSON.parse(nodeData.inputs.overrideConfig) + : nodeData.inputs?.overrideConfig + + const startNewSession = nodeData.inputs?.startNewSession as boolean + + const baseURL = (nodeData.inputs?.baseURL as string) || (options.baseURL as string) + + // Validate agentflowid is a valid UUID + if (!selectedAgentflowId || !isValidUUID(selectedAgentflowId)) { + throw new Error('Invalid agentflow ID: must be a valid UUID') + } + + // Validate baseURL is a valid URL + if (!baseURL || !isValidURL(baseURL)) { + throw new Error('Invalid base URL: must be a valid URL') + } + + const credentialData = await getCredentialData(nodeData.credential ?? '', options) + const agentflowApiKey = getCredentialParam('agentflowApiKey', credentialData, nodeData) + + if (selectedAgentflowId === options.chatflowid) throw new Error('Cannot call the same agentflow!') + + let headers = {} + if (agentflowApiKey) headers = { Authorization: `Bearer ${agentflowApiKey}` } + + let toolInput = '' + if (useQuestionFromChat) { + toolInput = input + } else if (customInput) { + toolInput = customInput + } + + let name = _name || 'agentflow_tool' + + return new AgentflowTool({ + name, + baseURL, + description, + returnDirect, + agentflowid: selectedAgentflowId, + startNewSession, + headers, + input: toolInput, + overrideConfig + }) + } +} + +class AgentflowTool extends StructuredTool { + static lc_name() { + return 'AgentflowTool' + } + + name = 'agentflow_tool' + + description = 'Execute another agentflow' + + input = '' + + agentflowid = '' + + startNewSession = false + + baseURL = 'http://localhost:3000' + + headers = {} + + overrideConfig?: object + + schema = z.object({ + input: z.string().describe('input question') + // overrideConfig: z.record(z.any()).optional().describe('override config'), // This will be passed to the Agent, so comment it for now. 
+ }) as any + + constructor({ + name, + description, + returnDirect, + input, + agentflowid, + startNewSession, + baseURL, + headers, + overrideConfig + }: { + name: string + description: string + returnDirect: boolean + input: string + agentflowid: string + startNewSession: boolean + baseURL: string + headers: ICommonObject + overrideConfig?: object + }) { + super() + this.name = name + this.description = description + this.input = input + this.baseURL = baseURL + this.startNewSession = startNewSession + this.headers = headers + this.agentflowid = agentflowid + this.overrideConfig = overrideConfig + this.returnDirect = returnDirect + } + + async call( + arg: z.infer, + configArg?: RunnableConfig | Callbacks, + tags?: string[], + flowConfig?: { sessionId?: string; chatId?: string; input?: string } + ): Promise { + const config = parseCallbackConfigArg(configArg) + if (config.runName === undefined) { + config.runName = this.name + } + let parsed + try { + parsed = await parseWithTypeConversion(this.schema, arg) + } catch (e) { + throw new Error(`Received tool input did not match expected schema: ${JSON.stringify(arg)}`) + } + const callbackManager_ = await CallbackManager.configure( + config.callbacks, + this.callbacks, + config.tags || tags, + this.tags, + config.metadata, + this.metadata, + { verbose: this.verbose } + ) + const runManager = await callbackManager_?.handleToolStart( + this.toJSON(), + typeof parsed === 'string' ? 
parsed : JSON.stringify(parsed), + undefined, + undefined, + undefined, + undefined, + config.runName + ) + let result + try { + result = await this._call(parsed, runManager, flowConfig) + } catch (e) { + await runManager?.handleToolError(e) + throw e + } + if (result && typeof result !== 'string') { + result = JSON.stringify(result) + } + await runManager?.handleToolEnd(result) + return result + } + + // @ts-ignore + protected async _call( + arg: z.infer, + _?: CallbackManagerForToolRun, + flowConfig?: { sessionId?: string; chatId?: string; input?: string } + ): Promise { + const inputQuestion = this.input || arg.input + + const body = { + question: inputQuestion, + chatId: this.startNewSession ? uuidv4() : flowConfig?.chatId, + overrideConfig: { + sessionId: this.startNewSession ? uuidv4() : flowConfig?.sessionId, + ...(this.overrideConfig ?? {}), + ...(arg.overrideConfig ?? {}) + } + } + + const options = { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + 'flowise-tool': 'true', + ...this.headers + }, + body: JSON.stringify(body) + } + + const code = ` +const fetch = require('node-fetch'); +const url = "${this.baseURL}/api/v1/prediction/${this.agentflowid}"; + +const body = $callBody; + +const options = $callOptions; + +try { + const response = await fetch(url, options); + const resp = await response.json(); + return resp.text; +} catch (error) { + console.error(error); + return ''; +} +` + + // Create additional sandbox variables + const additionalSandbox: ICommonObject = { + $callOptions: options, + $callBody: body + } + + const sandbox = createCodeExecutionSandbox('', [], {}, additionalSandbox) + + let response = await executeJavaScriptCode(code, sandbox, { + useSandbox: false + }) + + if (typeof response === 'object') { + response = JSON.stringify(response) + } + + return response + } +} + +module.exports = { nodeClass: AgentAsTool_Tools } diff --git a/packages/components/nodes/tools/AgentAsTool/agentastool.svg 
b/packages/components/nodes/tools/AgentAsTool/agentastool.svg new file mode 100644 index 000000000..cd6cd8de9 --- /dev/null +++ b/packages/components/nodes/tools/AgentAsTool/agentastool.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/packages/components/nodes/tools/Arxiv/Arxiv.ts b/packages/components/nodes/tools/Arxiv/Arxiv.ts new file mode 100644 index 000000000..d1fc62221 --- /dev/null +++ b/packages/components/nodes/tools/Arxiv/Arxiv.ts @@ -0,0 +1,144 @@ +import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface' +import { getBaseClasses } from '../../../src/utils' +import { ArxivParameters, desc, ArxivTool } from './core' + +class Arxiv_Tools implements INode { + label: string + name: string + version: number + description: string + type: string + icon: string + category: string + baseClasses: string[] + inputs: INodeParams[] + + constructor() { + this.label = 'Arxiv' + this.name = 'arxiv' + this.version = 1.0 + this.type = 'Arxiv' + this.icon = 'arxiv.png' + this.category = 'Tools' + this.description = 'Search and read content from academic papers on Arxiv' + this.baseClasses = [this.type, ...getBaseClasses(ArxivTool)] + this.inputs = [ + { + label: 'Name', + name: 'arxivName', + type: 'string', + default: 'arxiv_search', + description: 'Name of the tool', + additionalParams: true, + optional: true + }, + { + label: 'Description', + name: 'arxivDescription', + type: 'string', + rows: 4, + default: desc, + description: 'Describe to LLM when it should use this tool', + additionalParams: true, + optional: true + }, + { + label: 'Top K Results', + name: 'topKResults', + type: 'number', + description: 'Number of top results to return from Arxiv search', + default: '3', + step: 1, + optional: true, + additionalParams: true + }, + { + label: 'Max Query Length', + name: 'maxQueryLength', + type: 'number', + description: 'Maximum length of the search query', + default: '300', + step: 1, + optional: true, + additionalParams: true 
+ }, + { + label: 'Max Content Length', + name: 'docContentCharsMax', + type: 'number', + description: 'Maximum length of the returned content. Set to 0 for unlimited', + default: '10000', + step: 1, + optional: true, + additionalParams: true + }, + { + label: 'Load Full Content', + name: 'loadFullContent', + type: 'boolean', + description: + 'Download PDFs and extract full paper content instead of just summaries. Warning: This is slower and uses more resources.', + default: false, + optional: true, + additionalParams: true + }, + { + label: 'Continue On Failure', + name: 'continueOnFailure', + type: 'boolean', + description: + 'Continue processing other papers if one fails to download/parse (only applies when Load Full Content is enabled)', + default: false, + optional: true, + additionalParams: true + }, + { + label: 'Use Legacy Build', + name: 'legacyBuild', + type: 'boolean', + description: 'Use legacy PDF.js build for PDF parsing (only applies when Load Full Content is enabled)', + default: false, + optional: true, + additionalParams: true + } + ] + } + + async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { + const name = (nodeData.inputs?.name as string) || (nodeData.inputs?.arxivName as string) + const description = (nodeData.inputs?.description as string) || (nodeData.inputs?.arxivDescription as string) + const topKResults = nodeData.inputs?.topKResults as string + const maxQueryLength = nodeData.inputs?.maxQueryLength as string + const docContentCharsMax = nodeData.inputs?.docContentCharsMax as string + const loadFullContent = nodeData.inputs?.loadFullContent as boolean + const continueOnFailure = nodeData.inputs?.continueOnFailure as boolean + const legacyBuild = nodeData.inputs?.legacyBuild as boolean + + let logger + const orgId = options.orgId + if (process.env.DEBUG === 'true') { + logger = options.logger + } + + const obj: ArxivParameters = {} + if (description) obj.description = description + if (name) + obj.name = name + 
.toLowerCase() + .replace(/ /g, '_') + .replace(/[^a-z0-9_-]/g, '') + if (topKResults) obj.topKResults = parseInt(topKResults, 10) + if (maxQueryLength) obj.maxQueryLength = parseInt(maxQueryLength, 10) + if (docContentCharsMax) { + const maxChars = parseInt(docContentCharsMax, 10) + obj.docContentCharsMax = maxChars === 0 ? undefined : maxChars + } + if (loadFullContent !== undefined) obj.loadFullContent = loadFullContent + if (continueOnFailure !== undefined) obj.continueOnFailure = continueOnFailure + if (legacyBuild !== undefined) obj.legacyBuild = legacyBuild + + return new ArxivTool(obj, logger, orgId) + } +} + +module.exports = { nodeClass: Arxiv_Tools } diff --git a/packages/components/nodes/tools/Arxiv/arxiv.png b/packages/components/nodes/tools/Arxiv/arxiv.png new file mode 100644 index 000000000..9311a6909 Binary files /dev/null and b/packages/components/nodes/tools/Arxiv/arxiv.png differ diff --git a/packages/components/nodes/tools/Arxiv/core.ts b/packages/components/nodes/tools/Arxiv/core.ts new file mode 100644 index 000000000..a774eb3ad --- /dev/null +++ b/packages/components/nodes/tools/Arxiv/core.ts @@ -0,0 +1,266 @@ +import { z } from 'zod' +import fetch from 'node-fetch' +import { PDFLoader } from '@langchain/community/document_loaders/fs/pdf' +import { DynamicStructuredTool } from '../OpenAPIToolkit/core' + +export const desc = `Use this tool to search for academic papers on Arxiv. You can search by keywords, topics, authors, or specific Arxiv IDs. 
The tool can return either paper summaries or download and extract full paper content.` + +export interface ArxivParameters { + topKResults?: number + maxQueryLength?: number + docContentCharsMax?: number + loadFullContent?: boolean + continueOnFailure?: boolean + legacyBuild?: boolean + name?: string + description?: string +} + +interface ArxivResult { + id: string + title: string + authors: string[] + summary: string + published: string + updated: string + entryId: string +} + +// Schema for Arxiv search +const createArxivSchema = () => { + return z.object({ + query: z + .string() + .describe('Search query for Arxiv papers. Can be keywords, topics, authors, or specific Arxiv IDs (e.g., 2301.12345)') + }) +} + +export class ArxivTool extends DynamicStructuredTool { + topKResults = 3 + maxQueryLength = 300 + docContentCharsMax = 4000 + loadFullContent = false + continueOnFailure = false + legacyBuild = false + logger?: any + orgId?: string + + constructor(args?: ArxivParameters, logger?: any, orgId?: string) { + const schema = createArxivSchema() + + const toolInput = { + name: args?.name || 'arxiv_search', + description: args?.description || desc, + schema: schema, + baseUrl: '', + method: 'GET', + headers: {} + } + super(toolInput) + this.topKResults = args?.topKResults ?? this.topKResults + this.maxQueryLength = args?.maxQueryLength ?? this.maxQueryLength + this.docContentCharsMax = args?.docContentCharsMax ?? this.docContentCharsMax + this.loadFullContent = args?.loadFullContent ?? this.loadFullContent + this.continueOnFailure = args?.continueOnFailure ?? this.continueOnFailure + this.legacyBuild = args?.legacyBuild ?? 
this.legacyBuild + this.logger = logger + this.orgId = orgId + } + + private isArxivIdentifier(query: string): boolean { + const arxivIdentifierPattern = /\d{2}(0[1-9]|1[0-2])\.\d{4,5}(v\d+|)|\d{7}.*/ + const queryItems = query.substring(0, this.maxQueryLength).split(/\s+/) + + for (const queryItem of queryItems) { + const match = queryItem.match(arxivIdentifierPattern) + if (!match || match[0] !== queryItem) { + return false + } + } + return true + } + + private parseArxivResponse(xmlText: string): ArxivResult[] { + const results: ArxivResult[] = [] + + // Simple XML parsing for Arxiv API response + const entryRegex = /(.*?)<\/entry>/gs + const entries = xmlText.match(entryRegex) || [] + + for (const entry of entries) { + try { + const id = this.extractXmlValue(entry, 'id') + const title = this.extractXmlValue(entry, 'title')?.replace(/\n\s+/g, ' ').trim() + const summary = this.extractXmlValue(entry, 'summary')?.replace(/\n\s+/g, ' ').trim() + const published = this.extractXmlValue(entry, 'published') + const updated = this.extractXmlValue(entry, 'updated') + + // Extract authors + const authorRegex = /(.*?)<\/name><\/author>/g + const authors: string[] = [] + let authorMatch + while ((authorMatch = authorRegex.exec(entry)) !== null) { + authors.push(authorMatch[1]) + } + + if (id && title && summary) { + results.push({ + id, + title, + authors, + summary, + published: published || '', + updated: updated || '', + entryId: id + }) + } + } catch (error) { + console.warn('Error parsing Arxiv entry:', error) + } + } + + return results + } + + private extractXmlValue(xml: string, tag: string): string | undefined { + const regex = new RegExp(`<${tag}[^>]*>(.*?)`, 's') + const match = xml.match(regex) + return match ? 
match[1] : undefined + } + + private async fetchResults(query: string): Promise { + const baseUrl = 'http://export.arxiv.org/api/query' + let searchParams: URLSearchParams + + if (this.isArxivIdentifier(query)) { + // Search by ID + const ids = query.split(/\s+/).join(',') + searchParams = new URLSearchParams({ + id_list: ids, + max_results: this.topKResults.toString() + }) + } else { + // Search by query + // Remove problematic characters that can cause search issues + const cleanedQuery = query.replace(/[:-]/g, '').substring(0, this.maxQueryLength) + searchParams = new URLSearchParams({ + search_query: `all:${cleanedQuery}`, + max_results: this.topKResults.toString(), + sortBy: 'relevance', + sortOrder: 'descending' + }) + } + + const url = `${baseUrl}?${searchParams.toString()}` + this.logger?.info(`[${this.orgId}]: Making Arxiv API call to: ${url}`) + + const response = await fetch(url) + if (!response.ok) { + throw new Error(`Arxiv API error: ${response.status} ${response.statusText}`) + } + + const xmlText = await response.text() + return this.parseArxivResponse(xmlText) + } + + private async downloadAndExtractPdf(arxivId: string): Promise { + // Extract clean arxiv ID from full URL if needed + const cleanId = arxivId.replace('http://arxiv.org/abs/', '').replace('https://arxiv.org/abs/', '') + const pdfUrl = `https://arxiv.org/pdf/${cleanId}.pdf` + + this.logger?.info(`[${this.orgId}]: Downloading PDF from: ${pdfUrl}`) + + const response = await fetch(pdfUrl) + if (!response.ok) { + throw new Error(`Failed to download PDF: ${response.status} ${response.statusText}`) + } + + // Get PDF buffer and create blob + const buffer = await response.buffer() + const blob = new Blob([buffer]) + + // Use PDFLoader to extract text (same as Pdf.ts) + const loader = new PDFLoader(blob, { + splitPages: false, + pdfjs: () => + // @ts-ignore + this.legacyBuild ? 
import('pdfjs-dist/legacy/build/pdf.js') : import('pdf-parse/lib/pdf.js/v1.10.100/build/pdf.js') + }) + + const docs = await loader.load() + return docs.map((doc) => doc.pageContent).join('\n') + } + + /** @ignore */ + async _call(arg: any): Promise { + const { query } = arg + + if (!query) { + throw new Error('Query is required for Arxiv search') + } + + try { + const results = await this.fetchResults(query) + + if (results.length === 0) { + return 'No good Arxiv Result was found' + } + + if (!this.loadFullContent) { + // Return summaries only (original behavior) + const docs = results.map((result) => { + const publishedDate = result.published ? new Date(result.published).toISOString().split('T')[0] : 'Unknown' + return `Published: ${publishedDate}\nTitle: ${result.title}\nAuthors: ${result.authors.join(', ')}\nSummary: ${ + result.summary + }` + }) + + const fullText = docs.join('\n\n') + return this.docContentCharsMax ? fullText.substring(0, this.docContentCharsMax) : fullText + } else { + // Download PDFs and extract full content + const docs: string[] = [] + + for (const result of results) { + try { + this.logger?.info(`[${this.orgId}]: Processing paper: ${result.title}`) + + // Download and extract PDF content + const fullText = await this.downloadAndExtractPdf(result.id) + + const publishedDate = result.published ? new Date(result.published).toISOString().split('T')[0] : 'Unknown' + + // Format with metadata and full content + const docContent = `Published: ${publishedDate}\nTitle: ${result.title}\nAuthors: ${result.authors.join( + ', ' + )}\nSummary: ${result.summary}\n\nFull Content:\n${fullText}` + + const truncatedContent = this.docContentCharsMax ? docContent.substring(0, this.docContentCharsMax) : docContent + + docs.push(truncatedContent) + } catch (error) { + const errorMessage = error instanceof Error ? 
error.message : 'Unknown error' + console.error(`Error processing paper ${result.title}:`, errorMessage) + + if (!this.continueOnFailure) { + throw new Error(`Failed to process paper "${result.title}": ${errorMessage}`) + } else { + // Add error notice and continue with summary only + const publishedDate = result.published ? new Date(result.published).toISOString().split('T')[0] : 'Unknown' + const fallbackContent = `Published: ${publishedDate}\nTitle: ${result.title}\nAuthors: ${result.authors.join( + ', ' + )}\nSummary: ${result.summary}\n\n[ERROR: Could not load full content - ${errorMessage}]` + docs.push(fallbackContent) + } + } + } + + return docs.join('\n\n---\n\n') + } + } catch (error) { + const errorMessage = error instanceof Error ? error.message : 'Unknown error' + console.error('Arxiv search error:', errorMessage) + throw new Error(`Failed to search Arxiv: ${errorMessage}`) + } + } +} diff --git a/packages/components/nodes/tools/ChainTool/core.ts b/packages/components/nodes/tools/ChainTool/core.ts index 1848b81ca..e43c126f8 100644 --- a/packages/components/nodes/tools/ChainTool/core.ts +++ b/packages/components/nodes/tools/ChainTool/core.ts @@ -14,17 +14,41 @@ export class ChainTool extends DynamicTool { super({ ...rest, func: async (input, runManager) => { - const childManagers = runManager?.getChild() - const handlers = childManagers?.handlers?.filter((handler) => !(handler instanceof CustomChainHandler)) || [] - if (childManagers) childManagers.handlers = handlers + // prevent sending SSE events of the sub-chain + const sseStreamer = runManager?.handlers.find((handler) => handler instanceof CustomChainHandler)?.sseStreamer + if (runManager) { + const callbacks = runManager.handlers + for (let i = 0; i < callbacks.length; i += 1) { + if (callbacks[i] instanceof CustomChainHandler) { + ;(callbacks[i] as any).sseStreamer = undefined + } + } + } if ((chain as any).prompt && (chain as any).prompt.promptValues) { const promptValues = 
handleEscapeCharacters((chain as any).prompt.promptValues, true) - const values = await chain.call(promptValues, childManagers) + + const values = await chain.call(promptValues, runManager?.getChild()) + if (runManager && sseStreamer) { + const callbacks = runManager.handlers + for (let i = 0; i < callbacks.length; i += 1) { + if (callbacks[i] instanceof CustomChainHandler) { + ;(callbacks[i] as any).sseStreamer = sseStreamer + } + } + } return values?.text } - const values = chain.run(input, childManagers) + const values = chain.run(input, runManager?.getChild()) + if (runManager && sseStreamer) { + const callbacks = runManager.handlers + for (let i = 0; i < callbacks.length; i += 1) { + if (callbacks[i] instanceof CustomChainHandler) { + ;(callbacks[i] as any).sseStreamer = sseStreamer + } + } + } return values } }) diff --git a/packages/components/nodes/tools/ChatflowTool/ChatflowTool.ts b/packages/components/nodes/tools/ChatflowTool/ChatflowTool.ts index c22e0f35c..a2db7fbf3 100644 --- a/packages/components/nodes/tools/ChatflowTool/ChatflowTool.ts +++ b/packages/components/nodes/tools/ChatflowTool/ChatflowTool.ts @@ -1,11 +1,17 @@ import { DataSource } from 'typeorm' import { z } from 'zod' -import { NodeVM } from '@flowiseai/nodevm' import { RunnableConfig } from '@langchain/core/runnables' import { CallbackManagerForToolRun, Callbacks, CallbackManager, parseCallbackConfigArg } from '@langchain/core/callbacks/manager' import { StructuredTool } from '@langchain/core/tools' import { ICommonObject, IDatabaseEntity, INode, INodeData, INodeOptionsValue, INodeParams } from '../../../src/Interface' -import { availableDependencies, defaultAllowBuiltInDep, getCredentialData, getCredentialParam } from '../../../src/utils' +import { + getCredentialData, + getCredentialParam, + executeJavaScriptCode, + createCodeExecutionSandbox, + parseWithTypeConversion +} from '../../../src/utils' +import { isValidUUID, isValidURL } from '../../../src/validator' import { v4 as uuidv4 } 
from 'uuid' class ChatflowTool_Tools implements INode { @@ -23,7 +29,7 @@ class ChatflowTool_Tools implements INode { constructor() { this.label = 'Chatflow Tool' this.name = 'ChatflowTool' - this.version = 5.0 + this.version = 5.1 this.type = 'ChatflowTool' this.icon = 'chatflowTool.svg' this.category = 'Tools' @@ -69,7 +75,8 @@ class ChatflowTool_Tools implements INode { description: 'Override the config passed to the Chatflow.', type: 'json', optional: true, - additionalParams: true + additionalParams: true, + acceptVariable: true }, { label: 'Base URL', @@ -106,7 +113,10 @@ class ChatflowTool_Tools implements INode { type: 'string', description: 'Custom input to be passed to the chatflow. Leave empty to let LLM decides the input.', optional: true, - additionalParams: true + additionalParams: true, + show: { + useQuestionFromChat: false + } } ] } @@ -122,12 +132,24 @@ class ChatflowTool_Tools implements INode { return returnData } - const chatflows = await appDataSource.getRepository(databaseEntities['ChatFlow']).find() + const searchOptions = options.searchOptions || {} + const chatflows = await appDataSource.getRepository(databaseEntities['ChatFlow']).findBy(searchOptions) for (let i = 0; i < chatflows.length; i += 1) { + let type = chatflows[i].type + if (type === 'AGENTFLOW') { + type = 'AgentflowV2' + } else if (type === 'MULTIAGENT') { + type = 'AgentflowV1' + } else if (type === 'ASSISTANT') { + type = 'Custom Assistant' + } else { + type = 'Chatflow' + } const data = { label: chatflows[i].name, - name: chatflows[i].id + name: chatflows[i].id, + description: type } as INodeOptionsValue returnData.push(data) } @@ -153,6 +175,16 @@ class ChatflowTool_Tools implements INode { const baseURL = (nodeData.inputs?.baseURL as string) || (options.baseURL as string) + // Validate selectedChatflowId is a valid UUID + if (!selectedChatflowId || !isValidUUID(selectedChatflowId)) { + throw new Error('Invalid chatflow ID: must be a valid UUID') + } + + // Validate 
baseURL is a valid URL + if (!baseURL || !isValidURL(baseURL)) { + throw new Error('Invalid base URL: must be a valid URL') + } + const credentialData = await getCredentialData(nodeData.credential ?? '', options) const chatflowApiKey = getCredentialParam('chatflowApiKey', credentialData, nodeData) @@ -255,7 +287,7 @@ class ChatflowTool extends StructuredTool { } let parsed try { - parsed = await this.schema.parseAsync(arg) + parsed = await parseWithTypeConversion(this.schema, arg) } catch (e) { throw new Error(`Received tool input did not match expected schema: ${JSON.stringify(arg)}`) } @@ -319,16 +351,6 @@ class ChatflowTool extends StructuredTool { body: JSON.stringify(body) } - let sandbox = { - $callOptions: options, - $callBody: body, - util: undefined, - Symbol: undefined, - child_process: undefined, - fs: undefined, - process: undefined - } - const code = ` const fetch = require('node-fetch'); const url = "${this.baseURL}/api/v1/prediction/${this.chatflowid}"; @@ -346,26 +368,22 @@ try { return ''; } ` - const builtinDeps = process.env.TOOL_FUNCTION_BUILTIN_DEP - ? defaultAllowBuiltInDep.concat(process.env.TOOL_FUNCTION_BUILTIN_DEP.split(',')) - : defaultAllowBuiltInDep - const externalDeps = process.env.TOOL_FUNCTION_EXTERNAL_DEP ? 
process.env.TOOL_FUNCTION_EXTERNAL_DEP.split(',') : [] - const deps = availableDependencies.concat(externalDeps) - const vmOptions = { - console: 'inherit', - sandbox, - require: { - external: { modules: deps }, - builtin: builtinDeps - }, - eval: false, - wasm: false, - timeout: 10000 - } as any + // Create additional sandbox variables + const additionalSandbox: ICommonObject = { + $callOptions: options, + $callBody: body + } - const vm = new NodeVM(vmOptions) - const response = await vm.run(`module.exports = async function() {${code}}()`, __dirname) + const sandbox = createCodeExecutionSandbox('', [], {}, additionalSandbox) + + let response = await executeJavaScriptCode(code, sandbox, { + useSandbox: false + }) + + if (typeof response === 'object') { + response = JSON.stringify(response) + } return response } diff --git a/packages/components/nodes/tools/CodeInterpreterE2B/CodeInterpreterE2B.ts b/packages/components/nodes/tools/CodeInterpreterE2B/CodeInterpreterE2B.ts index 8c3967dc9..544ed856a 100644 --- a/packages/components/nodes/tools/CodeInterpreterE2B/CodeInterpreterE2B.ts +++ b/packages/components/nodes/tools/CodeInterpreterE2B/CodeInterpreterE2B.ts @@ -1,7 +1,7 @@ import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface' -import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils' +import { getBaseClasses, getCredentialData, getCredentialParam, parseWithTypeConversion } from '../../../src/utils' import { StructuredTool, ToolInputParsingException, ToolParams } from '@langchain/core/tools' -import { CodeInterpreter } from '@e2b/code-interpreter' +import { Sandbox } from '@e2b/code-interpreter' import { z } from 'zod' import { addSingleFileToStorage } from '../../../src/storageUtils' import { CallbackManager, CallbackManagerForToolRun, Callbacks, parseCallbackConfigArg } from '@langchain/core/callbacks/manager' @@ -80,7 +80,8 @@ class Code_Interpreter_Tools implements INode { schema: z.object({ input: 
z.string().describe('Python code to be executed in the sandbox environment') }), - chatflowid: options.chatflowid + chatflowid: options.chatflowid, + orgId: options.orgId }) } } @@ -92,6 +93,7 @@ type E2BToolInput = { apiKey: string schema: any chatflowid: string + orgId: string templateCodeInterpreterE2B?: string domainCodeInterpreterE2B?: string } @@ -105,7 +107,7 @@ export class E2BTool extends StructuredTool { description = DESC - instance: CodeInterpreter + instance: Sandbox apiKey: string @@ -113,6 +115,8 @@ export class E2BTool extends StructuredTool { chatflowid: string + orgId: string + flowObj: ICommonObject templateCodeInterpreterE2B?: string @@ -125,6 +129,7 @@ export class E2BTool extends StructuredTool { this.apiKey = options.apiKey this.schema = options.schema this.chatflowid = options.chatflowid + this.orgId = options.orgId this.templateCodeInterpreterE2B = options.templateCodeInterpreterE2B this.domainCodeInterpreterE2B = options.domainCodeInterpreterE2B } @@ -136,6 +141,7 @@ export class E2BTool extends StructuredTool { apiKey: options.apiKey, schema: options.schema, chatflowid: options.chatflowid, + orgId: options.orgId, templateCodeInterpreterE2B: options.templateCodeInterpreterE2B, domainCodeInterpreterE2B: options.domainCodeInterpreterE2B }) @@ -153,7 +159,7 @@ export class E2BTool extends StructuredTool { } let parsed try { - parsed = await this.schema.parseAsync(arg) + parsed = await parseWithTypeConversion(this.schema, arg) } catch (e) { throw new ToolInputParsingException(`Received tool input did not match expected schema`, JSON.stringify(arg)) } @@ -198,8 +204,8 @@ export class E2BTool extends StructuredTool { flowConfig = { ...this.flowObj, ...flowConfig } try { if ('input' in arg) { - this.instance = await CodeInterpreter.create({ apiKey: this.apiKey }) - const execution = await this.instance.notebook.execCell(arg?.input) + this.instance = await Sandbox.create({ apiKey: this.apiKey }) + const execution = await 
this.instance.runCode(arg?.input, { language: 'python' }) const artifacts = [] for (const result of execution.results) { @@ -212,36 +218,39 @@ export class E2BTool extends StructuredTool { const filename = `artifact_${Date.now()}.png` - const res = await addSingleFileToStorage( + // Don't check storage usage because this is incoming file, and if we throw error, agent will keep on retrying + const { path } = await addSingleFileToStorage( 'image/png', pngData, filename, + this.orgId, this.chatflowid, flowConfig!.chatId as string ) - artifacts.push({ type: 'png', data: res }) + + artifacts.push({ type: 'png', data: path }) } else if (key === 'jpeg') { //@ts-ignore const jpegData = Buffer.from(result.jpeg, 'base64') const filename = `artifact_${Date.now()}.jpg` - const res = await addSingleFileToStorage( + const { path } = await addSingleFileToStorage( 'image/jpg', jpegData, filename, + this.orgId, this.chatflowid, flowConfig!.chatId as string ) - artifacts.push({ type: 'jpeg', data: res }) + + artifacts.push({ type: 'jpeg', data: path }) } else if (key === 'html' || key === 'markdown' || key === 'latex' || key === 'json' || key === 'javascript') { artifacts.push({ type: key, data: (result as any)[key] }) } //TODO: support for pdf } } - this.instance.close() - let output = '' if (execution.text) output = execution.text @@ -256,7 +265,7 @@ export class E2BTool extends StructuredTool { return 'No input provided' } } catch (e) { - if (this.instance) this.instance.close() + if (this.instance) this.instance.kill() return typeof e === 'string' ? 
e : JSON.stringify(e, null, 2) } } diff --git a/packages/components/nodes/tools/CustomTool/CustomTool.ts b/packages/components/nodes/tools/CustomTool/CustomTool.ts index f82b5d4f7..514376aa8 100644 --- a/packages/components/nodes/tools/CustomTool/CustomTool.ts +++ b/packages/components/nodes/tools/CustomTool/CustomTool.ts @@ -3,6 +3,7 @@ import { convertSchemaToZod, getBaseClasses, getVars } from '../../../src/utils' import { DynamicStructuredTool } from './core' import { z } from 'zod' import { DataSource } from 'typeorm' +import { SecureZodSchemaParser } from '../../../src/secureZodParser' class CustomTool_Tools implements INode { label: string @@ -77,7 +78,8 @@ class CustomTool_Tools implements INode { return returnData } - const tools = await appDataSource.getRepository(databaseEntities['Tool']).find() + const searchOptions = options.searchOptions || {} + const tools = await appDataSource.getRepository(databaseEntities['Tool']).findBy(searchOptions) for (let i = 0; i < tools.length; i += 1) { const data = { @@ -118,11 +120,10 @@ class CustomTool_Tools implements INode { if (customToolName) obj.name = customToolName if (customToolDesc) obj.description = customToolDesc if (customToolSchema) { - const zodSchemaFunction = new Function('z', `return ${customToolSchema}`) - obj.schema = zodSchemaFunction(z) + obj.schema = SecureZodSchemaParser.parseZodSchema(customToolSchema) as z.ZodObject } - const variables = await getVars(appDataSource, databaseEntities, nodeData) + const variables = await getVars(appDataSource, databaseEntities, nodeData, options) const flow = { chatflowId: options.chatflowid } diff --git a/packages/components/nodes/tools/CustomTool/core.ts b/packages/components/nodes/tools/CustomTool/core.ts index e64f776b8..06ebd3e29 100644 --- a/packages/components/nodes/tools/CustomTool/core.ts +++ b/packages/components/nodes/tools/CustomTool/core.ts @@ -1,9 +1,8 @@ import { z } from 'zod' -import { NodeVM } from '@flowiseai/nodevm' import { RunnableConfig } 
from '@langchain/core/runnables' import { StructuredTool, ToolParams } from '@langchain/core/tools' import { CallbackManagerForToolRun, Callbacks, CallbackManager, parseCallbackConfigArg } from '@langchain/core/callbacks/manager' -import { availableDependencies, defaultAllowBuiltInDep, prepareSandboxVars } from '../../../src/utils' +import { executeJavaScriptCode, createCodeExecutionSandbox, parseWithTypeConversion } from '../../../src/utils' import { ICommonObject } from '../../../src/Interface' class ToolInputParsingException extends Error { @@ -69,7 +68,7 @@ export class DynamicStructuredTool< } let parsed try { - parsed = await this.schema.parseAsync(arg) + parsed = await parseWithTypeConversion(this.schema, arg) } catch (e) { throw new ToolInputParsingException(`Received tool input did not match expected schema`, JSON.stringify(arg)) } @@ -111,47 +110,26 @@ export class DynamicStructuredTool< _?: CallbackManagerForToolRun, flowConfig?: { sessionId?: string; chatId?: string; input?: string; state?: ICommonObject } ): Promise { - let sandbox: any = { - util: undefined, - Symbol: undefined, - child_process: undefined, - fs: undefined, - process: undefined - } + // Create additional sandbox variables for tool arguments + const additionalSandbox: ICommonObject = {} + if (typeof arg === 'object' && Object.keys(arg).length) { for (const item in arg) { - sandbox[`$${item}`] = arg[item] + additionalSandbox[`$${item}`] = arg[item] } } - sandbox['$vars'] = prepareSandboxVars(this.variables) + // Prepare flow object for sandbox + const flow = this.flowObj ? 
{ ...this.flowObj, ...flowConfig } : {} - // inject flow properties - if (this.flowObj) { - sandbox['$flow'] = { ...this.flowObj, ...flowConfig } + const sandbox = createCodeExecutionSandbox('', this.variables || [], flow, additionalSandbox) + + let response = await executeJavaScriptCode(this.code, sandbox) + + if (typeof response === 'object') { + response = JSON.stringify(response) } - const builtinDeps = process.env.TOOL_FUNCTION_BUILTIN_DEP - ? defaultAllowBuiltInDep.concat(process.env.TOOL_FUNCTION_BUILTIN_DEP.split(',')) - : defaultAllowBuiltInDep - const externalDeps = process.env.TOOL_FUNCTION_EXTERNAL_DEP ? process.env.TOOL_FUNCTION_EXTERNAL_DEP.split(',') : [] - const deps = availableDependencies.concat(externalDeps) - - const options = { - console: 'inherit', - sandbox, - require: { - external: { modules: deps }, - builtin: builtinDeps - }, - eval: false, - wasm: false, - timeout: 10000 - } as any - - const vm = new NodeVM(options) - const response = await vm.run(`module.exports = async function() {${this.code}}()`, __dirname) - return response } diff --git a/packages/components/nodes/tools/Gmail/Gmail.ts b/packages/components/nodes/tools/Gmail/Gmail.ts new file mode 100644 index 000000000..f164680aa --- /dev/null +++ b/packages/components/nodes/tools/Gmail/Gmail.ts @@ -0,0 +1,630 @@ +import { convertMultiOptionsToStringArray, getCredentialData, getCredentialParam, refreshOAuth2Token } from '../../../src/utils' +import { createGmailTools } from './core' +import type { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface' + +class Gmail_Tools implements INode { + label: string + name: string + version: number + type: string + icon: string + category: string + description: string + baseClasses: string[] + credential: INodeParams + inputs: INodeParams[] + + constructor() { + this.label = 'Gmail' + this.name = 'gmail' + this.version = 1.0 + this.type = 'Gmail' + this.icon = 'gmail.svg' + this.category = 'Tools' + this.description = 
'Perform Gmail operations for drafts, messages, labels, and threads' + this.baseClasses = [this.type, 'Tool'] + this.credential = { + label: 'Connect Credential', + name: 'credential', + type: 'credential', + credentialNames: ['gmailOAuth2'] + } + this.inputs = [ + { + label: 'Type', + name: 'gmailType', + type: 'options', + options: [ + { + label: 'Drafts', + name: 'drafts' + }, + { + label: 'Messages', + name: 'messages' + }, + { + label: 'Labels', + name: 'labels' + }, + { + label: 'Threads', + name: 'threads' + } + ] + }, + // Draft Actions + { + label: 'Draft Actions', + name: 'draftActions', + type: 'multiOptions', + options: [ + { + label: 'List Drafts', + name: 'listDrafts' + }, + { + label: 'Create Draft', + name: 'createDraft' + }, + { + label: 'Get Draft', + name: 'getDraft' + }, + { + label: 'Update Draft', + name: 'updateDraft' + }, + { + label: 'Send Draft', + name: 'sendDraft' + }, + { + label: 'Delete Draft', + name: 'deleteDraft' + } + ], + show: { + gmailType: ['drafts'] + } + }, + // Message Actions + { + label: 'Message Actions', + name: 'messageActions', + type: 'multiOptions', + options: [ + { + label: 'List Messages', + name: 'listMessages' + }, + { + label: 'Get Message', + name: 'getMessage' + }, + { + label: 'Send Message', + name: 'sendMessage' + }, + { + label: 'Modify Message', + name: 'modifyMessage' + }, + { + label: 'Trash Message', + name: 'trashMessage' + }, + { + label: 'Untrash Message', + name: 'untrashMessage' + }, + { + label: 'Delete Message', + name: 'deleteMessage' + } + ], + show: { + gmailType: ['messages'] + } + }, + // Label Actions + { + label: 'Label Actions', + name: 'labelActions', + type: 'multiOptions', + options: [ + { + label: 'List Labels', + name: 'listLabels' + }, + { + label: 'Get Label', + name: 'getLabel' + }, + { + label: 'Create Label', + name: 'createLabel' + }, + { + label: 'Update Label', + name: 'updateLabel' + }, + { + label: 'Delete Label', + name: 'deleteLabel' + } + ], + show: { + gmailType: 
['labels'] + } + }, + // Thread Actions + { + label: 'Thread Actions', + name: 'threadActions', + type: 'multiOptions', + options: [ + { + label: 'List Threads', + name: 'listThreads' + }, + { + label: 'Get Thread', + name: 'getThread' + }, + { + label: 'Modify Thread', + name: 'modifyThread' + }, + { + label: 'Trash Thread', + name: 'trashThread' + }, + { + label: 'Untrash Thread', + name: 'untrashThread' + }, + { + label: 'Delete Thread', + name: 'deleteThread' + } + ], + show: { + gmailType: ['threads'] + } + }, + // DRAFT PARAMETERS + // List Drafts Parameters + { + label: 'Max Results', + name: 'draftMaxResults', + type: 'number', + description: 'Maximum number of drafts to return', + default: 100, + show: { + draftActions: ['listDrafts'] + }, + additionalParams: true, + optional: true + }, + // Create Draft Parameters + { + label: 'To', + name: 'draftTo', + type: 'string', + description: 'Recipient email address(es), comma-separated', + placeholder: 'user1@example.com,user2@example.com', + show: { + draftActions: ['createDraft'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Subject', + name: 'draftSubject', + type: 'string', + description: 'Email subject', + placeholder: 'Email Subject', + show: { + draftActions: ['createDraft'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Body', + name: 'draftBody', + type: 'string', + description: 'Email body content', + placeholder: 'Email content', + rows: 4, + show: { + draftActions: ['createDraft'] + }, + additionalParams: true, + optional: true + }, + { + label: 'CC', + name: 'draftCc', + type: 'string', + description: 'CC email address(es), comma-separated', + placeholder: 'cc1@example.com,cc2@example.com', + show: { + draftActions: ['createDraft'] + }, + additionalParams: true, + optional: true + }, + { + label: 'BCC', + name: 'draftBcc', + type: 'string', + description: 'BCC email address(es), comma-separated', + placeholder: 'bcc1@example.com,bcc2@example.com', + show: { + 
draftActions: ['createDraft'] + }, + additionalParams: true, + optional: true + }, + // Draft ID for Get/Update/Send/Delete + { + label: 'Draft ID', + name: 'draftId', + type: 'string', + description: 'ID of the draft', + show: { + draftActions: ['getDraft', 'updateDraft', 'sendDraft', 'deleteDraft'] + }, + additionalParams: true, + optional: true + }, + // Update Draft Parameters + { + label: 'To (Update)', + name: 'draftUpdateTo', + type: 'string', + description: 'Recipient email address(es), comma-separated', + placeholder: 'user1@example.com,user2@example.com', + show: { + draftActions: ['updateDraft'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Subject (Update)', + name: 'draftUpdateSubject', + type: 'string', + description: 'Email subject', + placeholder: 'Email Subject', + show: { + draftActions: ['updateDraft'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Body (Update)', + name: 'draftUpdateBody', + type: 'string', + description: 'Email body content', + placeholder: 'Email content', + rows: 4, + show: { + draftActions: ['updateDraft'] + }, + additionalParams: true, + optional: true + }, + // MESSAGE PARAMETERS + // List Messages Parameters + { + label: 'Max Results', + name: 'messageMaxResults', + type: 'number', + description: 'Maximum number of messages to return', + default: 100, + show: { + messageActions: ['listMessages'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Query', + name: 'messageQuery', + type: 'string', + description: 'Query string for filtering results (Gmail search syntax)', + placeholder: 'is:unread from:example@gmail.com', + show: { + messageActions: ['listMessages'] + }, + additionalParams: true, + optional: true + }, + // Send Message Parameters + { + label: 'To', + name: 'messageTo', + type: 'string', + description: 'Recipient email address(es), comma-separated', + placeholder: 'user1@example.com,user2@example.com', + show: { + messageActions: ['sendMessage'] + }, + 
additionalParams: true, + optional: true + }, + { + label: 'Subject', + name: 'messageSubject', + type: 'string', + description: 'Email subject', + placeholder: 'Email Subject', + show: { + messageActions: ['sendMessage'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Body', + name: 'messageBody', + type: 'string', + description: 'Email body content', + placeholder: 'Email content', + rows: 4, + show: { + messageActions: ['sendMessage'] + }, + additionalParams: true, + optional: true + }, + { + label: 'CC', + name: 'messageCc', + type: 'string', + description: 'CC email address(es), comma-separated', + placeholder: 'cc1@example.com,cc2@example.com', + show: { + messageActions: ['sendMessage'] + }, + additionalParams: true, + optional: true + }, + { + label: 'BCC', + name: 'messageBcc', + type: 'string', + description: 'BCC email address(es), comma-separated', + placeholder: 'bcc1@example.com,bcc2@example.com', + show: { + messageActions: ['sendMessage'] + }, + additionalParams: true, + optional: true + }, + // Message ID for Get/Modify/Trash/Untrash/Delete + { + label: 'Message ID', + name: 'messageId', + type: 'string', + description: 'ID of the message', + show: { + messageActions: ['getMessage', 'modifyMessage', 'trashMessage', 'untrashMessage', 'deleteMessage'] + }, + additionalParams: true, + optional: true + }, + // Message Label Modification + { + label: 'Add Label IDs', + name: 'messageAddLabelIds', + type: 'string', + description: 'Comma-separated label IDs to add', + placeholder: 'INBOX,STARRED', + show: { + messageActions: ['modifyMessage'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Remove Label IDs', + name: 'messageRemoveLabelIds', + type: 'string', + description: 'Comma-separated label IDs to remove', + placeholder: 'UNREAD,SPAM', + show: { + messageActions: ['modifyMessage'] + }, + additionalParams: true, + optional: true + }, + // LABEL PARAMETERS + // Create Label Parameters + { + label: 'Label Name', + 
name: 'labelName', + type: 'string', + description: 'Name of the label', + placeholder: 'Important', + show: { + labelActions: ['createLabel', 'updateLabel'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Label Color', + name: 'labelColor', + type: 'string', + description: 'Color of the label (hex color code)', + placeholder: '#ff0000', + show: { + labelActions: ['createLabel', 'updateLabel'] + }, + additionalParams: true, + optional: true + }, + // Label ID for Get/Update/Delete + { + label: 'Label ID', + name: 'labelId', + type: 'string', + description: 'ID of the label', + show: { + labelActions: ['getLabel', 'updateLabel', 'deleteLabel'] + }, + additionalParams: true, + optional: true + }, + // THREAD PARAMETERS + // List Threads Parameters + { + label: 'Max Results', + name: 'threadMaxResults', + type: 'number', + description: 'Maximum number of threads to return', + default: 100, + show: { + threadActions: ['listThreads'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Query', + name: 'threadQuery', + type: 'string', + description: 'Query string for filtering results (Gmail search syntax)', + placeholder: 'is:unread from:example@gmail.com', + show: { + threadActions: ['listThreads'] + }, + additionalParams: true, + optional: true + }, + // Thread ID for Get/Modify/Trash/Untrash/Delete + { + label: 'Thread ID', + name: 'threadId', + type: 'string', + description: 'ID of the thread', + show: { + threadActions: ['getThread', 'modifyThread', 'trashThread', 'untrashThread', 'deleteThread'] + }, + additionalParams: true, + optional: true + }, + // Thread Label Modification + { + label: 'Add Label IDs', + name: 'threadAddLabelIds', + type: 'string', + description: 'Comma-separated label IDs to add', + placeholder: 'INBOX,STARRED', + show: { + threadActions: ['modifyThread'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Remove Label IDs', + name: 'threadRemoveLabelIds', + type: 'string', + description: 
'Comma-separated label IDs to remove', + placeholder: 'UNREAD,SPAM', + show: { + threadActions: ['modifyThread'] + }, + additionalParams: true, + optional: true + } + ] + } + + async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { + let credentialData = await getCredentialData(nodeData.credential ?? '', options) + credentialData = await refreshOAuth2Token(nodeData.credential ?? '', credentialData, options) + const accessToken = getCredentialParam('access_token', credentialData, nodeData) + + if (!accessToken) { + throw new Error('No access token found in credential') + } + + // Get all actions based on type + const gmailType = nodeData.inputs?.gmailType as string + let actions: string[] = [] + + if (gmailType === 'drafts') { + actions = convertMultiOptionsToStringArray(nodeData.inputs?.draftActions) + } else if (gmailType === 'messages') { + actions = convertMultiOptionsToStringArray(nodeData.inputs?.messageActions) + } else if (gmailType === 'labels') { + actions = convertMultiOptionsToStringArray(nodeData.inputs?.labelActions) + } else if (gmailType === 'threads') { + actions = convertMultiOptionsToStringArray(nodeData.inputs?.threadActions) + } + + const defaultParams = this.transformNodeInputsToToolArgs(nodeData) + + // Create and return tools based on selected actions + const tools = createGmailTools({ + actions, + accessToken, + defaultParams + }) + + return tools + } + + transformNodeInputsToToolArgs(nodeData: INodeData): Record { + // Collect default parameters from inputs + const defaultParams: Record = {} + + // Draft parameters + if (nodeData.inputs?.draftMaxResults) defaultParams.draftMaxResults = nodeData.inputs.draftMaxResults + if (nodeData.inputs?.draftTo) defaultParams.draftTo = nodeData.inputs.draftTo + if (nodeData.inputs?.draftSubject) defaultParams.draftSubject = nodeData.inputs.draftSubject + if (nodeData.inputs?.draftBody) defaultParams.draftBody = nodeData.inputs.draftBody + if (nodeData.inputs?.draftCc) 
defaultParams.draftCc = nodeData.inputs.draftCc + if (nodeData.inputs?.draftBcc) defaultParams.draftBcc = nodeData.inputs.draftBcc + if (nodeData.inputs?.draftId) defaultParams.draftId = nodeData.inputs.draftId + if (nodeData.inputs?.draftUpdateTo) defaultParams.draftUpdateTo = nodeData.inputs.draftUpdateTo + if (nodeData.inputs?.draftUpdateSubject) defaultParams.draftUpdateSubject = nodeData.inputs.draftUpdateSubject + if (nodeData.inputs?.draftUpdateBody) defaultParams.draftUpdateBody = nodeData.inputs.draftUpdateBody + + // Message parameters + if (nodeData.inputs?.messageMaxResults) defaultParams.messageMaxResults = nodeData.inputs.messageMaxResults + if (nodeData.inputs?.messageQuery) defaultParams.messageQuery = nodeData.inputs.messageQuery + if (nodeData.inputs?.messageTo) defaultParams.messageTo = nodeData.inputs.messageTo + if (nodeData.inputs?.messageSubject) defaultParams.messageSubject = nodeData.inputs.messageSubject + if (nodeData.inputs?.messageBody) defaultParams.messageBody = nodeData.inputs.messageBody + if (nodeData.inputs?.messageCc) defaultParams.messageCc = nodeData.inputs.messageCc + if (nodeData.inputs?.messageBcc) defaultParams.messageBcc = nodeData.inputs.messageBcc + if (nodeData.inputs?.messageId) defaultParams.messageId = nodeData.inputs.messageId + if (nodeData.inputs?.messageAddLabelIds) defaultParams.messageAddLabelIds = nodeData.inputs.messageAddLabelIds + if (nodeData.inputs?.messageRemoveLabelIds) defaultParams.messageRemoveLabelIds = nodeData.inputs.messageRemoveLabelIds + + // Label parameters + if (nodeData.inputs?.labelName) defaultParams.labelName = nodeData.inputs.labelName + if (nodeData.inputs?.labelColor) defaultParams.labelColor = nodeData.inputs.labelColor + if (nodeData.inputs?.labelId) defaultParams.labelId = nodeData.inputs.labelId + + // Thread parameters + if (nodeData.inputs?.threadMaxResults) defaultParams.threadMaxResults = nodeData.inputs.threadMaxResults + if (nodeData.inputs?.threadQuery) 
defaultParams.threadQuery = nodeData.inputs.threadQuery + if (nodeData.inputs?.threadId) defaultParams.threadId = nodeData.inputs.threadId + if (nodeData.inputs?.threadAddLabelIds) defaultParams.threadAddLabelIds = nodeData.inputs.threadAddLabelIds + if (nodeData.inputs?.threadRemoveLabelIds) defaultParams.threadRemoveLabelIds = nodeData.inputs.threadRemoveLabelIds + + return defaultParams + } +} + +module.exports = { nodeClass: Gmail_Tools } diff --git a/packages/components/nodes/tools/Gmail/core.ts b/packages/components/nodes/tools/Gmail/core.ts new file mode 100644 index 000000000..00f053c03 --- /dev/null +++ b/packages/components/nodes/tools/Gmail/core.ts @@ -0,0 +1,1079 @@ +import { z } from 'zod' +import fetch from 'node-fetch' +import { DynamicStructuredTool } from '../OpenAPIToolkit/core' +import { TOOL_ARGS_PREFIX, formatToolError } from '../../../src/agents' + +export const desc = `Use this when you want to access Gmail API for managing drafts, messages, labels, and threads` + +export interface Headers { + [key: string]: string +} + +export interface Body { + [key: string]: any +} + +export interface RequestParameters { + headers?: Headers + body?: Body + url?: string + description?: string + name?: string + actions?: string[] + accessToken?: string + defaultParams?: any +} + +// Define schemas for different Gmail operations +const ListSchema = z.object({ + maxResults: z.number().optional().default(100).describe('Maximum number of results to return'), + query: z.string().optional().describe('Query string for filtering results (Gmail search syntax)') +}) + +const CreateDraftSchema = z.object({ + to: z.string().describe('Recipient email address(es), comma-separated'), + subject: z.string().optional().describe('Email subject'), + body: z.string().optional().describe('Email body content'), + cc: z.string().optional().describe('CC email address(es), comma-separated'), + bcc: z.string().optional().describe('BCC email address(es), comma-separated') +}) + +const 
SendMessageSchema = z.object({ + to: z.string().describe('Recipient email address(es), comma-separated'), + subject: z.string().optional().describe('Email subject'), + body: z.string().optional().describe('Email body content'), + cc: z.string().optional().describe('CC email address(es), comma-separated'), + bcc: z.string().optional().describe('BCC email address(es), comma-separated') +}) + +const GetByIdSchema = z.object({ + id: z.string().describe('ID of the resource') +}) + +const ModifySchema = z.object({ + id: z.string().describe('ID of the resource'), + addLabelIds: z.array(z.string()).optional().describe('Label IDs to add'), + removeLabelIds: z.array(z.string()).optional().describe('Label IDs to remove') +}) + +const CreateLabelSchema = z.object({ + labelName: z.string().describe('Name of the label'), + labelColor: z.string().optional().describe('Color of the label (hex color code)') +}) + +class BaseGmailTool extends DynamicStructuredTool { + protected accessToken: string = '' + + constructor(args: any) { + super(args) + this.accessToken = args.accessToken ?? '' + } + + async makeGmailRequest(url: string, method: string = 'GET', body?: any, params?: any): Promise { + const headers = { + Authorization: `Bearer ${this.accessToken}`, + 'Content-Type': 'application/json', + ...this.headers + } + + const response = await fetch(url, { + method, + headers, + body: body ? 
JSON.stringify(body) : undefined + }) + + if (!response.ok) { + const errorText = await response.text() + throw new Error(`Gmail API Error ${response.status}: ${response.statusText} - ${errorText}`) + } + + const data = await response.text() + return data + TOOL_ARGS_PREFIX + JSON.stringify(params) + } + + createMimeMessage(to: string, subject?: string, body?: string, cc?: string, bcc?: string): string { + let message = '' + + message += `To: ${to}\r\n` + if (cc) message += `Cc: ${cc}\r\n` + if (bcc) message += `Bcc: ${bcc}\r\n` + if (subject) message += `Subject: ${subject}\r\n` + message += `MIME-Version: 1.0\r\n` + message += `Content-Type: text/html; charset=utf-8\r\n` + message += `Content-Transfer-Encoding: base64\r\n\r\n` + + if (body) { + message += Buffer.from(body, 'utf-8').toString('base64') + } + + return Buffer.from(message).toString('base64').replace(/\+/g, '-').replace(/\//g, '_').replace(/=+$/, '') + } +} + +// Draft Tools +class ListDraftsTool extends BaseGmailTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'list_drafts', + description: 'List drafts in Gmail mailbox', + schema: ListSchema, + baseUrl: 'https://gmail.googleapis.com/gmail/v1/users/me/drafts', + method: 'GET', + headers: {} + } + super({ ...toolInput, accessToken: args.accessToken }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const queryParams = new URLSearchParams() + + if (params.maxResults) queryParams.append('maxResults', params.maxResults.toString()) + if (params.query) queryParams.append('q', params.query) + + const url = `https://gmail.googleapis.com/gmail/v1/users/me/drafts?${queryParams.toString()}` + + try { + const response = await this.makeGmailRequest(url, 'GET', undefined, params) + return response + } catch (error) { + return formatToolError(`Error listing drafts: ${error}`, params) + } + } +} + +class CreateDraftTool extends 
BaseGmailTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'create_draft', + description: 'Create a new draft in Gmail', + schema: CreateDraftSchema, + baseUrl: 'https://gmail.googleapis.com/gmail/v1/users/me/drafts', + method: 'POST', + headers: {} + } + super({ ...toolInput, accessToken: args.accessToken }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + + try { + const raw = this.createMimeMessage(params.to, params.subject, params.body, params.cc, params.bcc) + const draftData = { + message: { + raw: raw + } + } + + const url = 'https://gmail.googleapis.com/gmail/v1/users/me/drafts' + const response = await this.makeGmailRequest(url, 'POST', draftData, params) + return response + } catch (error) { + return formatToolError(`Error creating draft: ${error}`, params) + } + } +} + +class GetDraftTool extends BaseGmailTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'get_draft', + description: 'Get a specific draft from Gmail', + schema: GetByIdSchema, + baseUrl: 'https://gmail.googleapis.com/gmail/v1/users/me/drafts', + method: 'GET', + headers: {} + } + super({ ...toolInput, accessToken: args.accessToken }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const draftId = params.draftId || params.id + + if (!draftId) { + return 'Error: Draft ID is required' + } + + try { + const url = `https://gmail.googleapis.com/gmail/v1/users/me/drafts/${draftId}` + const response = await this.makeGmailRequest(url, 'GET', undefined, params) + return response + } catch (error) { + return formatToolError(`Error getting draft: ${error}`, params) + } + } +} + +class UpdateDraftTool extends BaseGmailTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'update_draft', + description: 'Update a 
specific draft in Gmail', + schema: CreateDraftSchema, + baseUrl: 'https://gmail.googleapis.com/gmail/v1/users/me/drafts', + method: 'PUT', + headers: {} + } + super({ ...toolInput, accessToken: args.accessToken }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const draftId = params.draftId || params.id + + if (!draftId) { + return 'Error: Draft ID is required' + } + + try { + const raw = this.createMimeMessage(params.to, params.subject, params.body, params.cc, params.bcc) + const draftData = { + message: { + raw: raw + } + } + + const url = `https://gmail.googleapis.com/gmail/v1/users/me/drafts/${draftId}` + const response = await this.makeGmailRequest(url, 'PUT', draftData, params) + return response + } catch (error) { + return formatToolError(`Error updating draft: ${error}`, params) + } + } +} + +class SendDraftTool extends BaseGmailTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'send_draft', + description: 'Send a specific draft from Gmail', + schema: GetByIdSchema, + baseUrl: 'https://gmail.googleapis.com/gmail/v1/users/me/drafts/send', + method: 'POST', + headers: {} + } + super({ ...toolInput, accessToken: args.accessToken }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const draftId = params.draftId || params.id + + if (!draftId) { + return 'Error: Draft ID is required' + } + + try { + const url = 'https://gmail.googleapis.com/gmail/v1/users/me/drafts/send' + const response = await this.makeGmailRequest(url, 'POST', { id: draftId }, params) + return response + } catch (error) { + return formatToolError(`Error sending draft: ${error}`, params) + } + } +} + +class DeleteDraftTool extends BaseGmailTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'delete_draft', + description: 'Delete a 
specific draft from Gmail', + schema: GetByIdSchema, + baseUrl: 'https://gmail.googleapis.com/gmail/v1/users/me/drafts', + method: 'DELETE', + headers: {} + } + super({ ...toolInput, accessToken: args.accessToken }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const draftId = params.draftId || params.id + + if (!draftId) { + return 'Error: Draft ID is required' + } + + try { + const url = `https://gmail.googleapis.com/gmail/v1/users/me/drafts/${draftId}` + await this.makeGmailRequest(url, 'DELETE', undefined, params) + return `Draft ${draftId} deleted successfully` + } catch (error) { + return formatToolError(`Error deleting draft: ${error}`, params) + } + } +} + +// Message Tools +class ListMessagesTool extends BaseGmailTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'list_messages', + description: 'List messages in Gmail mailbox', + schema: ListSchema, + baseUrl: 'https://gmail.googleapis.com/gmail/v1/users/me/messages', + method: 'GET', + headers: {} + } + super({ ...toolInput, accessToken: args.accessToken }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const queryParams = new URLSearchParams() + + if (params.maxResults) queryParams.append('maxResults', params.maxResults.toString()) + if (params.query) queryParams.append('q', params.query) + + const url = `https://gmail.googleapis.com/gmail/v1/users/me/messages?${queryParams.toString()}` + + try { + const response = await this.makeGmailRequest(url, 'GET', undefined, params) + return response + } catch (error) { + return formatToolError(`Error listing messages: ${error}`, params) + } + } +} + +class GetMessageTool extends BaseGmailTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'get_message', + description: 'Get a specific message from Gmail', + 
schema: GetByIdSchema, + baseUrl: 'https://gmail.googleapis.com/gmail/v1/users/me/messages', + method: 'GET', + headers: {} + } + super({ ...toolInput, accessToken: args.accessToken }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const messageId = params.messageId || params.id + + if (!messageId) { + return 'Error: Message ID is required' + } + + try { + const url = `https://gmail.googleapis.com/gmail/v1/users/me/messages/${messageId}` + const response = await this.makeGmailRequest(url, 'GET', undefined, params) + return response + } catch (error) { + return formatToolError(`Error getting message: ${error}`, params) + } + } +} + +class SendMessageTool extends BaseGmailTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'send_message', + description: 'Send a new message via Gmail', + schema: SendMessageSchema, + baseUrl: 'https://gmail.googleapis.com/gmail/v1/users/me/messages/send', + method: 'POST', + headers: {} + } + super({ ...toolInput, accessToken: args.accessToken }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + + try { + const raw = this.createMimeMessage(params.to, params.subject, params.body, params.cc, params.bcc) + const messageData = { + raw: raw + } + + const url = 'https://gmail.googleapis.com/gmail/v1/users/me/messages/send' + const response = await this.makeGmailRequest(url, 'POST', messageData, params) + return response + } catch (error) { + return formatToolError(`Error sending message: ${error}`, params) + } + } +} + +class ModifyMessageTool extends BaseGmailTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'modify_message', + description: 'Modify labels on a message in Gmail', + schema: ModifySchema, + baseUrl: 'https://gmail.googleapis.com/gmail/v1/users/me/messages', + method: 
'POST', + headers: {} + } + super({ ...toolInput, accessToken: args.accessToken }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const messageId = params.messageId || params.id + + if (!messageId) { + return 'Error: Message ID is required' + } + + try { + const modifyData: any = {} + if (params.addLabelIds && params.addLabelIds.length > 0) { + modifyData.addLabelIds = params.addLabelIds + } + if (params.removeLabelIds && params.removeLabelIds.length > 0) { + modifyData.removeLabelIds = params.removeLabelIds + } + + const url = `https://gmail.googleapis.com/gmail/v1/users/me/messages/${messageId}/modify` + const response = await this.makeGmailRequest(url, 'POST', modifyData, params) + return response + } catch (error) { + return formatToolError(`Error modifying message: ${error}`, params) + } + } +} + +class TrashMessageTool extends BaseGmailTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'trash_message', + description: 'Move a message to trash in Gmail', + schema: GetByIdSchema, + baseUrl: 'https://gmail.googleapis.com/gmail/v1/users/me/messages', + method: 'POST', + headers: {} + } + super({ ...toolInput, accessToken: args.accessToken }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const messageId = params.messageId || params.id + + if (!messageId) { + return 'Error: Message ID is required' + } + + try { + const url = `https://gmail.googleapis.com/gmail/v1/users/me/messages/${messageId}/trash` + const response = await this.makeGmailRequest(url, 'POST', undefined, params) + return response + } catch (error) { + return formatToolError(`Error moving message to trash: ${error}`, params) + } + } +} + +class UntrashMessageTool extends BaseGmailTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 
'untrash_message', + description: 'Remove a message from trash in Gmail', + schema: GetByIdSchema, + baseUrl: 'https://gmail.googleapis.com/gmail/v1/users/me/messages', + method: 'POST', + headers: {} + } + super({ ...toolInput, accessToken: args.accessToken }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const messageId = params.messageId || params.id + + if (!messageId) { + return 'Error: Message ID is required' + } + + try { + const url = `https://gmail.googleapis.com/gmail/v1/users/me/messages/${messageId}/untrash` + const response = await this.makeGmailRequest(url, 'POST', undefined, params) + return response + } catch (error) { + return formatToolError(`Error removing message from trash: ${error}`, params) + } + } +} + +class DeleteMessageTool extends BaseGmailTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'delete_message', + description: 'Permanently delete a message from Gmail', + schema: GetByIdSchema, + baseUrl: 'https://gmail.googleapis.com/gmail/v1/users/me/messages', + method: 'DELETE', + headers: {} + } + super({ ...toolInput, accessToken: args.accessToken }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const messageId = params.messageId || params.id + + if (!messageId) { + return 'Error: Message ID is required' + } + + try { + const url = `https://gmail.googleapis.com/gmail/v1/users/me/messages/${messageId}` + await this.makeGmailRequest(url, 'DELETE', undefined, params) + return `Message ${messageId} deleted successfully` + } catch (error) { + return formatToolError(`Error deleting message: ${error}`, params) + } + } +} + +// Label Tools +class ListLabelsTool extends BaseGmailTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'list_labels', + description: 'List labels in Gmail 
mailbox', + schema: z.object({}), + baseUrl: 'https://gmail.googleapis.com/gmail/v1/users/me/labels', + method: 'GET', + headers: {} + } + super({ ...toolInput, accessToken: args.accessToken }) + this.defaultParams = args.defaultParams || {} + } + + async _call(): Promise { + try { + const url = 'https://gmail.googleapis.com/gmail/v1/users/me/labels' + const response = await this.makeGmailRequest(url, 'GET', undefined, {}) + return response + } catch (error) { + return formatToolError(`Error listing labels: ${error}`, {}) + } + } +} + +class GetLabelTool extends BaseGmailTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'get_label', + description: 'Get a specific label from Gmail', + schema: GetByIdSchema, + baseUrl: 'https://gmail.googleapis.com/gmail/v1/users/me/labels', + method: 'GET', + headers: {} + } + super({ ...toolInput, accessToken: args.accessToken }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const labelId = params.labelId || params.id + + if (!labelId) { + return 'Error: Label ID is required' + } + + try { + const url = `https://gmail.googleapis.com/gmail/v1/users/me/labels/${labelId}` + const response = await this.makeGmailRequest(url, 'GET', undefined, params) + return response + } catch (error) { + return formatToolError(`Error getting label: ${error}`, params) + } + } +} + +class CreateLabelTool extends BaseGmailTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'create_label', + description: 'Create a new label in Gmail', + schema: CreateLabelSchema, + baseUrl: 'https://gmail.googleapis.com/gmail/v1/users/me/labels', + method: 'POST', + headers: {} + } + super({ ...toolInput, accessToken: args.accessToken }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + + if (!params.labelName) { + 
return 'Error: Label name is required' + } + + try { + const labelData: any = { + name: params.labelName, + labelListVisibility: 'labelShow', + messageListVisibility: 'show' + } + + if (params.labelColor) { + labelData.color = { + backgroundColor: params.labelColor + } + } + + const url = 'https://gmail.googleapis.com/gmail/v1/users/me/labels' + const response = await this.makeGmailRequest(url, 'POST', labelData, params) + return response + } catch (error) { + return formatToolError(`Error creating label: ${error}`, params) + } + } +} + +class UpdateLabelTool extends BaseGmailTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'update_label', + description: 'Update a label in Gmail', + schema: CreateLabelSchema, + baseUrl: 'https://gmail.googleapis.com/gmail/v1/users/me/labels', + method: 'PUT', + headers: {} + } + super({ ...toolInput, accessToken: args.accessToken }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const labelId = params.labelId || params.id + + if (!labelId) { + return 'Error: Label ID is required' + } + + try { + const labelData: any = {} + if (params.labelName) { + labelData.name = params.labelName + } + if (params.labelColor) { + labelData.color = { + backgroundColor: params.labelColor + } + } + + const url = `https://gmail.googleapis.com/gmail/v1/users/me/labels/${labelId}` + const response = await this.makeGmailRequest(url, 'PUT', labelData, params) + return response + } catch (error) { + return formatToolError(`Error updating label: ${error}`, params) + } + } +} + +class DeleteLabelTool extends BaseGmailTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'delete_label', + description: 'Delete a label from Gmail', + schema: GetByIdSchema, + baseUrl: 'https://gmail.googleapis.com/gmail/v1/users/me/labels', + method: 'DELETE', + headers: {} + } + super({ ...toolInput, accessToken: 
args.accessToken }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const labelId = params.labelId || params.id + + if (!labelId) { + return 'Error: Label ID is required' + } + + try { + const url = `https://gmail.googleapis.com/gmail/v1/users/me/labels/${labelId}` + await this.makeGmailRequest(url, 'DELETE', undefined, params) + return `Label ${labelId} deleted successfully` + } catch (error) { + return formatToolError(`Error deleting label: ${error}`, params) + } + } +} + +// Thread Tools +class ListThreadsTool extends BaseGmailTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'list_threads', + description: 'List threads in Gmail mailbox', + schema: ListSchema, + baseUrl: 'https://gmail.googleapis.com/gmail/v1/users/me/threads', + method: 'GET', + headers: {} + } + super({ ...toolInput, accessToken: args.accessToken }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const queryParams = new URLSearchParams() + + if (params.maxResults) queryParams.append('maxResults', params.maxResults.toString()) + if (params.query) queryParams.append('q', params.query) + + const url = `https://gmail.googleapis.com/gmail/v1/users/me/threads?${queryParams.toString()}` + + try { + const response = await this.makeGmailRequest(url, 'GET', undefined, params) + return response + } catch (error) { + return formatToolError(`Error listing threads: ${error}`, params) + } + } +} + +class GetThreadTool extends BaseGmailTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'get_thread', + description: 'Get a specific thread from Gmail', + schema: GetByIdSchema, + baseUrl: 'https://gmail.googleapis.com/gmail/v1/users/me/threads', + method: 'GET', + headers: {} + } + super({ ...toolInput, accessToken: args.accessToken }) + this.defaultParams 
= args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const threadId = params.threadId || params.id + + if (!threadId) { + return 'Error: Thread ID is required' + } + + try { + const url = `https://gmail.googleapis.com/gmail/v1/users/me/threads/${threadId}` + const response = await this.makeGmailRequest(url, 'GET', undefined, params) + return response + } catch (error) { + return formatToolError(`Error getting thread: ${error}`, params) + } + } +} + +class ModifyThreadTool extends BaseGmailTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'modify_thread', + description: 'Modify labels on a thread in Gmail', + schema: ModifySchema, + baseUrl: 'https://gmail.googleapis.com/gmail/v1/users/me/threads', + method: 'POST', + headers: {} + } + super({ ...toolInput, accessToken: args.accessToken }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const threadId = params.threadId || params.id + + if (!threadId) { + return 'Error: Thread ID is required' + } + + try { + const modifyData: any = {} + if (params.addLabelIds && params.addLabelIds.length > 0) { + modifyData.addLabelIds = params.addLabelIds + } + if (params.removeLabelIds && params.removeLabelIds.length > 0) { + modifyData.removeLabelIds = params.removeLabelIds + } + + const url = `https://gmail.googleapis.com/gmail/v1/users/me/threads/${threadId}/modify` + const response = await this.makeGmailRequest(url, 'POST', modifyData, params) + return response + } catch (error) { + return formatToolError(`Error modifying thread: ${error}`, params) + } + } +} + +class TrashThreadTool extends BaseGmailTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'trash_thread', + description: 'Move a thread to trash in Gmail', + schema: GetByIdSchema, + baseUrl: 
'https://gmail.googleapis.com/gmail/v1/users/me/threads', + method: 'POST', + headers: {} + } + super({ ...toolInput, accessToken: args.accessToken }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const threadId = params.threadId || params.id + + if (!threadId) { + return 'Error: Thread ID is required' + } + + try { + const url = `https://gmail.googleapis.com/gmail/v1/users/me/threads/${threadId}/trash` + const response = await this.makeGmailRequest(url, 'POST', undefined, params) + return response + } catch (error) { + return formatToolError(`Error moving thread to trash: ${error}`, params) + } + } +} + +class UntrashThreadTool extends BaseGmailTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'untrash_thread', + description: 'Remove a thread from trash in Gmail', + schema: GetByIdSchema, + baseUrl: 'https://gmail.googleapis.com/gmail/v1/users/me/threads', + method: 'POST', + headers: {} + } + super({ ...toolInput, accessToken: args.accessToken }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const threadId = params.threadId || params.id + + if (!threadId) { + return 'Error: Thread ID is required' + } + + try { + const url = `https://gmail.googleapis.com/gmail/v1/users/me/threads/${threadId}/untrash` + const response = await this.makeGmailRequest(url, 'POST', undefined, params) + return response + } catch (error) { + return formatToolError(`Error removing thread from trash: ${error}`, params) + } + } +} + +class DeleteThreadTool extends BaseGmailTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'delete_thread', + description: 'Permanently delete a thread from Gmail', + schema: GetByIdSchema, + baseUrl: 'https://gmail.googleapis.com/gmail/v1/users/me/threads', + method: 'DELETE', + headers: {} + } + 
super({ ...toolInput, accessToken: args.accessToken }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const threadId = params.threadId || params.id + + if (!threadId) { + return 'Error: Thread ID is required' + } + + try { + const url = `https://gmail.googleapis.com/gmail/v1/users/me/threads/${threadId}` + await this.makeGmailRequest(url, 'DELETE', undefined, params) + return `Thread ${threadId} deleted successfully` + } catch (error) { + return formatToolError(`Error deleting thread: ${error}`, params) + } + } +} + +export const createGmailTools = (args?: RequestParameters): DynamicStructuredTool[] => { + const tools: DynamicStructuredTool[] = [] + const actions = args?.actions || [] + const accessToken = args?.accessToken || '' + const defaultParams = args?.defaultParams || {} + + // Draft tools + if (actions.includes('listDrafts')) { + tools.push(new ListDraftsTool({ accessToken, defaultParams })) + } + + if (actions.includes('createDraft')) { + tools.push(new CreateDraftTool({ accessToken, defaultParams })) + } + + if (actions.includes('getDraft')) { + tools.push(new GetDraftTool({ accessToken, defaultParams })) + } + + if (actions.includes('updateDraft')) { + tools.push(new UpdateDraftTool({ accessToken, defaultParams })) + } + + if (actions.includes('sendDraft')) { + tools.push(new SendDraftTool({ accessToken, defaultParams })) + } + + if (actions.includes('deleteDraft')) { + tools.push(new DeleteDraftTool({ accessToken, defaultParams })) + } + + // Message tools + if (actions.includes('listMessages')) { + tools.push(new ListMessagesTool({ accessToken, defaultParams })) + } + + if (actions.includes('getMessage')) { + tools.push(new GetMessageTool({ accessToken, defaultParams })) + } + + if (actions.includes('sendMessage')) { + tools.push(new SendMessageTool({ accessToken, defaultParams })) + } + + if (actions.includes('modifyMessage')) { + tools.push(new 
ModifyMessageTool({ accessToken, defaultParams })) + } + + if (actions.includes('trashMessage')) { + tools.push(new TrashMessageTool({ accessToken, defaultParams })) + } + + if (actions.includes('untrashMessage')) { + tools.push(new UntrashMessageTool({ accessToken, defaultParams })) + } + + if (actions.includes('deleteMessage')) { + tools.push(new DeleteMessageTool({ accessToken, defaultParams })) + } + + // Label tools + if (actions.includes('listLabels')) { + tools.push(new ListLabelsTool({ accessToken, defaultParams })) + } + + if (actions.includes('getLabel')) { + tools.push(new GetLabelTool({ accessToken, defaultParams })) + } + + if (actions.includes('createLabel')) { + tools.push(new CreateLabelTool({ accessToken, defaultParams })) + } + + if (actions.includes('updateLabel')) { + tools.push(new UpdateLabelTool({ accessToken, defaultParams })) + } + + if (actions.includes('deleteLabel')) { + tools.push(new DeleteLabelTool({ accessToken, defaultParams })) + } + + // Thread tools + if (actions.includes('listThreads')) { + tools.push(new ListThreadsTool({ accessToken, defaultParams })) + } + + if (actions.includes('getThread')) { + tools.push(new GetThreadTool({ accessToken, defaultParams })) + } + + if (actions.includes('modifyThread')) { + tools.push(new ModifyThreadTool({ accessToken, defaultParams })) + } + + if (actions.includes('trashThread')) { + tools.push(new TrashThreadTool({ accessToken, defaultParams })) + } + + if (actions.includes('untrashThread')) { + tools.push(new UntrashThreadTool({ accessToken, defaultParams })) + } + + if (actions.includes('deleteThread')) { + tools.push(new DeleteThreadTool({ accessToken, defaultParams })) + } + + return tools +} diff --git a/packages/components/nodes/tools/Gmail/gmail.svg b/packages/components/nodes/tools/Gmail/gmail.svg new file mode 100644 index 000000000..3dceea456 --- /dev/null +++ b/packages/components/nodes/tools/Gmail/gmail.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git 
a/packages/components/nodes/tools/GoogleCalendar/GoogleCalendar.ts b/packages/components/nodes/tools/GoogleCalendar/GoogleCalendar.ts new file mode 100644 index 000000000..7498a1957 --- /dev/null +++ b/packages/components/nodes/tools/GoogleCalendar/GoogleCalendar.ts @@ -0,0 +1,637 @@ +import { convertMultiOptionsToStringArray, getCredentialData, getCredentialParam, refreshOAuth2Token } from '../../../src/utils' +import { createGoogleCalendarTools } from './core' +import type { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface' + +class GoogleCalendar_Tools implements INode { + label: string + name: string + version: number + type: string + icon: string + category: string + description: string + baseClasses: string[] + credential: INodeParams + inputs: INodeParams[] + + constructor() { + this.label = 'Google Calendar' + this.name = 'googleCalendarTool' + this.version = 1.0 + this.type = 'GoogleCalendar' + this.icon = 'google-calendar.svg' + this.category = 'Tools' + this.description = 'Perform Google Calendar operations such as managing events, calendars, and checking availability' + this.baseClasses = ['Tool'] + this.credential = { + label: 'Connect Credential', + name: 'credential', + type: 'credential', + credentialNames: ['googleCalendarOAuth2'] + } + this.inputs = [ + { + label: 'Type', + name: 'calendarType', + type: 'options', + description: 'Type of Google Calendar operation', + options: [ + { + label: 'Event', + name: 'event' + }, + { + label: 'Calendar', + name: 'calendar' + }, + { + label: 'Freebusy', + name: 'freebusy' + } + ] + }, + // Event Actions + { + label: 'Event Actions', + name: 'eventActions', + type: 'multiOptions', + description: 'Actions to perform', + options: [ + { + label: 'List Events', + name: 'listEvents' + }, + { + label: 'Create Event', + name: 'createEvent' + }, + { + label: 'Get Event', + name: 'getEvent' + }, + { + label: 'Update Event', + name: 'updateEvent' + }, + { + label: 'Delete Event', + name: 
'deleteEvent' + }, + { + label: 'Quick Add Event', + name: 'quickAddEvent' + } + ], + show: { + calendarType: ['event'] + } + }, + // Calendar Actions + { + label: 'Calendar Actions', + name: 'calendarActions', + type: 'multiOptions', + description: 'Actions to perform', + options: [ + { + label: 'List Calendars', + name: 'listCalendars' + }, + { + label: 'Create Calendar', + name: 'createCalendar' + }, + { + label: 'Get Calendar', + name: 'getCalendar' + }, + { + label: 'Update Calendar', + name: 'updateCalendar' + }, + { + label: 'Delete Calendar', + name: 'deleteCalendar' + }, + { + label: 'Clear Calendar', + name: 'clearCalendar' + } + ], + show: { + calendarType: ['calendar'] + } + }, + // Freebusy Actions + { + label: 'Freebusy Actions', + name: 'freebusyActions', + type: 'multiOptions', + description: 'Actions to perform', + options: [ + { + label: 'Query Freebusy', + name: 'queryFreebusy' + } + ], + show: { + calendarType: ['freebusy'] + } + }, + // Event Parameters + { + label: 'Calendar ID', + name: 'calendarId', + type: 'string', + description: 'Calendar ID (use "primary" for primary calendar)', + default: 'primary', + show: { + calendarType: ['event'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Event ID', + name: 'eventId', + type: 'string', + description: 'Event ID for operations on specific events', + show: { + eventActions: ['getEvent', 'updateEvent', 'deleteEvent'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Summary', + name: 'summary', + type: 'string', + description: 'Event title/summary', + show: { + eventActions: ['createEvent', 'updateEvent'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Description', + name: 'description', + type: 'string', + description: 'Event description', + show: { + eventActions: ['createEvent', 'updateEvent'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Location', + name: 'location', + type: 'string', + description: 'Event location', 
+ show: { + eventActions: ['createEvent', 'updateEvent'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Start Date Time', + name: 'startDateTime', + type: 'string', + description: 'Event start time (ISO 8601 format: 2023-12-25T10:00:00)', + show: { + eventActions: ['createEvent', 'updateEvent'] + }, + additionalParams: true, + optional: true + }, + { + label: 'End Date Time', + name: 'endDateTime', + type: 'string', + description: 'Event end time (ISO 8601 format: 2023-12-25T11:00:00)', + show: { + eventActions: ['createEvent', 'updateEvent'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Time Zone', + name: 'timeZone', + type: 'string', + description: 'Time zone (e.g., America/New_York)', + show: { + eventActions: ['createEvent', 'updateEvent'] + }, + additionalParams: true, + optional: true + }, + { + label: 'All Day Event', + name: 'allDay', + type: 'boolean', + description: 'Whether this is an all-day event', + show: { + eventActions: ['createEvent', 'updateEvent'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Start Date', + name: 'startDate', + type: 'string', + description: 'Start date for all-day events (YYYY-MM-DD format)', + show: { + eventActions: ['createEvent', 'updateEvent'] + }, + additionalParams: true, + optional: true + }, + { + label: 'End Date', + name: 'endDate', + type: 'string', + description: 'End date for all-day events (YYYY-MM-DD format)', + show: { + eventActions: ['createEvent', 'updateEvent'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Attendees', + name: 'attendees', + type: 'string', + description: 'Comma-separated list of attendee emails', + show: { + eventActions: ['createEvent', 'updateEvent'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Send Updates to', + name: 'sendUpdates', + type: 'options', + description: 'Send Updates to attendees', + options: [ + { label: 'All', name: 'all' }, + { label: 'External Only', name: 
'externalOnly' }, + { label: 'None', name: 'none' } + ], + show: { + eventActions: ['createEvent', 'updateEvent'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Recurrence Rules', + name: 'recurrence', + type: 'string', + description: 'Recurrence rules (RRULE format)', + show: { + eventActions: ['createEvent', 'updateEvent'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Reminder Minutes', + name: 'reminderMinutes', + type: 'number', + description: 'Minutes before event to send reminder', + show: { + eventActions: ['createEvent', 'updateEvent'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Visibility', + name: 'visibility', + type: 'options', + description: 'Event visibility', + options: [ + { label: 'Default', name: 'default' }, + { label: 'Public', name: 'public' }, + { label: 'Private', name: 'private' }, + { label: 'Confidential', name: 'confidential' } + ], + show: { + eventActions: ['createEvent', 'updateEvent'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Quick Add Text', + name: 'quickAddText', + type: 'string', + description: 'Natural language text for quick event creation (e.g., "Lunch with John tomorrow at 12pm")', + show: { + eventActions: ['quickAddEvent'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Time Min', + name: 'timeMin', + type: 'string', + description: 'Lower bound for event search (ISO 8601 format)', + show: { + eventActions: ['listEvents'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Time Max', + name: 'timeMax', + type: 'string', + description: 'Upper bound for event search (ISO 8601 format)', + show: { + eventActions: ['listEvents'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Max Results', + name: 'maxResults', + type: 'number', + description: 'Maximum number of events to return', + default: 250, + show: { + eventActions: ['listEvents'] + }, + additionalParams: true, + optional: true + }, + 
{ + label: 'Single Events', + name: 'singleEvents', + type: 'boolean', + description: 'Whether to expand recurring events into instances', + default: true, + show: { + eventActions: ['listEvents'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Order By', + name: 'orderBy', + type: 'options', + description: 'Order of events returned', + options: [ + { label: 'Start Time', name: 'startTime' }, + { label: 'Updated', name: 'updated' } + ], + show: { + eventActions: ['listEvents'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Query', + name: 'query', + type: 'string', + description: 'Free text search terms', + show: { + eventActions: ['listEvents'] + }, + additionalParams: true, + optional: true + }, + // Calendar Parameters + { + label: 'Calendar ID', + name: 'calendarIdForCalendar', + type: 'string', + description: 'Calendar ID for operations on specific calendars', + show: { + calendarActions: ['getCalendar', 'updateCalendar', 'deleteCalendar', 'clearCalendar'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Calendar Summary', + name: 'calendarSummary', + type: 'string', + description: 'Calendar title/name', + show: { + calendarActions: ['createCalendar', 'updateCalendar'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Calendar Description', + name: 'calendarDescription', + type: 'string', + description: 'Calendar description', + show: { + calendarActions: ['createCalendar', 'updateCalendar'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Calendar Location', + name: 'calendarLocation', + type: 'string', + description: 'Calendar location', + show: { + calendarActions: ['createCalendar', 'updateCalendar'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Calendar Time Zone', + name: 'calendarTimeZone', + type: 'string', + description: 'Calendar time zone (e.g., America/New_York)', + show: { + calendarActions: ['createCalendar', 'updateCalendar'] + }, + 
additionalParams: true, + optional: true + }, + { + label: 'Show Hidden', + name: 'showHidden', + type: 'boolean', + description: 'Whether to show hidden calendars', + show: { + calendarActions: ['listCalendars'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Min Access Role', + name: 'minAccessRole', + type: 'options', + description: 'Minimum access role for calendar list', + options: [ + { label: 'Free/Busy Reader', name: 'freeBusyReader' }, + { label: 'Reader', name: 'reader' }, + { label: 'Writer', name: 'writer' }, + { label: 'Owner', name: 'owner' } + ], + show: { + calendarActions: ['listCalendars'] + }, + additionalParams: true, + optional: true + }, + // Freebusy Parameters + { + label: 'Time Min', + name: 'freebusyTimeMin', + type: 'string', + description: 'Lower bound for freebusy query (ISO 8601 format)', + show: { + freebusyActions: ['queryFreebusy'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Time Max', + name: 'freebusyTimeMax', + type: 'string', + description: 'Upper bound for freebusy query (ISO 8601 format)', + show: { + freebusyActions: ['queryFreebusy'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Calendar IDs', + name: 'calendarIds', + type: 'string', + description: 'Comma-separated list of calendar IDs to check for free/busy info', + show: { + freebusyActions: ['queryFreebusy'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Group Expansion Max', + name: 'groupExpansionMax', + type: 'number', + description: 'Maximum number of calendars for which FreeBusy information is to be provided', + show: { + freebusyActions: ['queryFreebusy'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Calendar Expansion Max', + name: 'calendarExpansionMax', + type: 'number', + description: 'Maximum number of events that can be expanded for each calendar', + show: { + freebusyActions: ['queryFreebusy'] + }, + additionalParams: true, + optional: true + } + ] + } + + 
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> { + const calendarType = nodeData.inputs?.calendarType as string + + let credentialData = await getCredentialData(nodeData.credential ?? '', options) + credentialData = await refreshOAuth2Token(nodeData.credential ?? '', credentialData, options) + const accessToken = getCredentialParam('access_token', credentialData, nodeData) + + if (!accessToken) { + throw new Error('No access token found in credential') + } + + // Get all actions based on type + let actions: string[] = [] + + if (calendarType === 'event') { + actions = convertMultiOptionsToStringArray(nodeData.inputs?.eventActions) + } else if (calendarType === 'calendar') { + actions = convertMultiOptionsToStringArray(nodeData.inputs?.calendarActions) + } else if (calendarType === 'freebusy') { + actions = convertMultiOptionsToStringArray(nodeData.inputs?.freebusyActions) + } + + const defaultParams = this.transformNodeInputsToToolArgs(nodeData) + const tools = createGoogleCalendarTools({ + accessToken, + actions, + defaultParams + }) + + return tools + } + + transformNodeInputsToToolArgs(nodeData: INodeData): Record<string, any> { + // Collect default parameters from inputs + const defaultParams: Record<string, any> = {} + + // Event parameters + if (nodeData.inputs?.calendarId) defaultParams.calendarId = nodeData.inputs.calendarId + if (nodeData.inputs?.eventId) defaultParams.eventId = nodeData.inputs.eventId + if (nodeData.inputs?.summary) defaultParams.summary = nodeData.inputs.summary + if (nodeData.inputs?.description) defaultParams.description = nodeData.inputs.description + if (nodeData.inputs?.location) defaultParams.location = nodeData.inputs.location + if (nodeData.inputs?.startDateTime) defaultParams.startDateTime = nodeData.inputs.startDateTime + if (nodeData.inputs?.endDateTime) defaultParams.endDateTime = nodeData.inputs.endDateTime + if (nodeData.inputs?.timeZone) defaultParams.timeZone = nodeData.inputs.timeZone + if (nodeData.inputs?.allDay !== 
undefined) defaultParams.allDay = nodeData.inputs.allDay + if (nodeData.inputs?.startDate) defaultParams.startDate = nodeData.inputs.startDate + if (nodeData.inputs?.endDate) defaultParams.endDate = nodeData.inputs.endDate + if (nodeData.inputs?.attendees) defaultParams.attendees = nodeData.inputs.attendees + if (nodeData.inputs?.sendUpdates) defaultParams.sendUpdates = nodeData.inputs.sendUpdates + if (nodeData.inputs?.recurrence) defaultParams.recurrence = nodeData.inputs.recurrence + if (nodeData.inputs?.reminderMinutes) defaultParams.reminderMinutes = nodeData.inputs.reminderMinutes + if (nodeData.inputs?.visibility) defaultParams.visibility = nodeData.inputs.visibility + if (nodeData.inputs?.quickAddText) defaultParams.quickAddText = nodeData.inputs.quickAddText + if (nodeData.inputs?.timeMin) defaultParams.timeMin = nodeData.inputs.timeMin + if (nodeData.inputs?.timeMax) defaultParams.timeMax = nodeData.inputs.timeMax + if (nodeData.inputs?.maxResults) defaultParams.maxResults = nodeData.inputs.maxResults + if (nodeData.inputs?.singleEvents !== undefined) defaultParams.singleEvents = nodeData.inputs.singleEvents + if (nodeData.inputs?.orderBy) defaultParams.orderBy = nodeData.inputs.orderBy + if (nodeData.inputs?.query) defaultParams.query = nodeData.inputs.query + + // Calendar parameters + if (nodeData.inputs?.calendarIdForCalendar) defaultParams.calendarIdForCalendar = nodeData.inputs.calendarIdForCalendar + if (nodeData.inputs?.calendarSummary) defaultParams.calendarSummary = nodeData.inputs.calendarSummary + if (nodeData.inputs?.calendarDescription) defaultParams.calendarDescription = nodeData.inputs.calendarDescription + if (nodeData.inputs?.calendarLocation) defaultParams.calendarLocation = nodeData.inputs.calendarLocation + if (nodeData.inputs?.calendarTimeZone) defaultParams.calendarTimeZone = nodeData.inputs.calendarTimeZone + if (nodeData.inputs?.showHidden !== undefined) defaultParams.showHidden = nodeData.inputs.showHidden + if 
(nodeData.inputs?.minAccessRole) defaultParams.minAccessRole = nodeData.inputs.minAccessRole + + // Freebusy parameters + if (nodeData.inputs?.freebusyTimeMin) defaultParams.freebusyTimeMin = nodeData.inputs.freebusyTimeMin + if (nodeData.inputs?.freebusyTimeMax) defaultParams.freebusyTimeMax = nodeData.inputs.freebusyTimeMax + if (nodeData.inputs?.calendarIds) defaultParams.calendarIds = nodeData.inputs.calendarIds + if (nodeData.inputs?.groupExpansionMax) defaultParams.groupExpansionMax = nodeData.inputs.groupExpansionMax + if (nodeData.inputs?.calendarExpansionMax) defaultParams.calendarExpansionMax = nodeData.inputs.calendarExpansionMax + + return defaultParams + } +} + +module.exports = { nodeClass: GoogleCalendar_Tools } diff --git a/packages/components/nodes/tools/GoogleCalendar/core.ts b/packages/components/nodes/tools/GoogleCalendar/core.ts new file mode 100644 index 000000000..00f21d85d --- /dev/null +++ b/packages/components/nodes/tools/GoogleCalendar/core.ts @@ -0,0 +1,808 @@ +import { z } from 'zod' +import fetch from 'node-fetch' +import { DynamicStructuredTool } from '../OpenAPIToolkit/core' +import { TOOL_ARGS_PREFIX, formatToolError } from '../../../src/agents' + +export const desc = `Use this when you want to access Google Calendar API for managing events and calendars` + +export interface Headers { + [key: string]: string +} + +export interface Body { + [key: string]: any +} + +export interface RequestParameters { + headers?: Headers + body?: Body + url?: string + description?: string + name?: string + actions?: string[] + accessToken?: string + defaultParams?: any +} + +// Define schemas for different Google Calendar operations + +// Event Schemas +const ListEventsSchema = z.object({ + calendarId: z.string().default('primary').describe('Calendar ID (use "primary" for primary calendar)'), + timeMin: z.string().optional().describe('Lower bound for event search (RFC3339 timestamp)'), + timeMax: z.string().optional().describe('Upper bound for event 
search (RFC3339 timestamp)'), + maxResults: z.number().optional().default(250).describe('Maximum number of events to return'), + singleEvents: z.boolean().optional().default(true).describe('Whether to expand recurring events into instances'), + orderBy: z.enum(['startTime', 'updated']).optional().describe('Order of events returned'), + query: z.string().optional().describe('Free text search terms') +}) + +const CreateEventSchema = z.object({ + calendarId: z.string().default('primary').describe('Calendar ID where the event will be created'), + summary: z.string().describe('Event title/summary'), + description: z.string().optional().describe('Event description'), + location: z.string().optional().describe('Event location'), + startDateTime: z.string().optional().describe('Event start time (ISO 8601 format)'), + endDateTime: z.string().optional().describe('Event end time (ISO 8601 format)'), + startDate: z.string().optional().describe('Start date for all-day events (YYYY-MM-DD)'), + endDate: z.string().optional().describe('End date for all-day events (YYYY-MM-DD)'), + timeZone: z.string().optional().describe('Time zone (e.g., America/New_York)'), + attendees: z.string().optional().describe('Comma-separated list of attendee emails'), + sendUpdates: z.enum(['all', 'externalOnly', 'none']).optional().default('all').describe('Whether to send notifications to attendees'), + recurrence: z.string().optional().describe('Recurrence rules (RRULE format)'), + reminderMinutes: z.number().optional().describe('Minutes before event to send reminder'), + visibility: z.enum(['default', 'public', 'private', 'confidential']).optional().describe('Event visibility') +}) + +const GetEventSchema = z.object({ + calendarId: z.string().default('primary').describe('Calendar ID'), + eventId: z.string().describe('Event ID') +}) + +const UpdateEventSchema = z.object({ + calendarId: z.string().default('primary').describe('Calendar ID'), + eventId: z.string().describe('Event ID'), + summary: 
z.string().optional().describe('Updated event title/summary'), + description: z.string().optional().describe('Updated event description'), + location: z.string().optional().describe('Updated event location'), + startDateTime: z.string().optional().describe('Updated event start time (ISO 8601 format)'), + endDateTime: z.string().optional().describe('Updated event end time (ISO 8601 format)'), + startDate: z.string().optional().describe('Updated start date for all-day events (YYYY-MM-DD)'), + endDate: z.string().optional().describe('Updated end date for all-day events (YYYY-MM-DD)'), + timeZone: z.string().optional().describe('Updated time zone'), + attendees: z.string().optional().describe('Updated comma-separated list of attendee emails'), + sendUpdates: z.enum(['all', 'externalOnly', 'none']).optional().default('all').describe('Whether to send notifications to attendees'), + recurrence: z.string().optional().describe('Updated recurrence rules'), + reminderMinutes: z.number().optional().describe('Updated reminder minutes'), + visibility: z.enum(['default', 'public', 'private', 'confidential']).optional().describe('Updated event visibility') +}) + +const DeleteEventSchema = z.object({ + calendarId: z.string().default('primary').describe('Calendar ID'), + eventId: z.string().describe('Event ID to delete') +}) + +const QuickAddEventSchema = z.object({ + calendarId: z.string().default('primary').describe('Calendar ID'), + quickAddText: z.string().describe('Natural language text for quick event creation') +}) + +// Calendar Schemas +const ListCalendarsSchema = z.object({ + showHidden: z.boolean().optional().describe('Whether to show hidden calendars'), + minAccessRole: z.enum(['freeBusyReader', 'reader', 'writer', 'owner']).optional().describe('Minimum access role') +}) + +const CreateCalendarSchema = z.object({ + summary: z.string().describe('Calendar title/name'), + description: z.string().optional().describe('Calendar description'), + location: 
z.string().optional().describe('Calendar location'), + timeZone: z.string().optional().describe('Calendar time zone (e.g., America/New_York)') +}) + +const GetCalendarSchema = z.object({ + calendarId: z.string().describe('Calendar ID') +}) + +const UpdateCalendarSchema = z.object({ + calendarId: z.string().describe('Calendar ID'), + summary: z.string().optional().describe('Updated calendar title/name'), + description: z.string().optional().describe('Updated calendar description'), + location: z.string().optional().describe('Updated calendar location'), + timeZone: z.string().optional().describe('Updated calendar time zone') +}) + +const DeleteCalendarSchema = z.object({ + calendarId: z.string().describe('Calendar ID to delete') +}) + +const ClearCalendarSchema = z.object({ + calendarId: z.string().describe('Calendar ID to clear (removes all events)') +}) + +// Freebusy Schemas +const QueryFreebusySchema = z.object({ + timeMin: z.string().describe('Lower bound for freebusy query (RFC3339 timestamp)'), + timeMax: z.string().describe('Upper bound for freebusy query (RFC3339 timestamp)'), + calendarIds: z.string().describe('Comma-separated list of calendar IDs to check for free/busy info'), + groupExpansionMax: z.number().optional().describe('Maximum number of calendars for which FreeBusy information is to be provided'), + calendarExpansionMax: z.number().optional().describe('Maximum number of events that can be expanded for each calendar') +}) + +class BaseGoogleCalendarTool extends DynamicStructuredTool { + protected accessToken: string = '' + + constructor(args: any) { + super(args) + this.accessToken = args.accessToken ?? 
'' + } + + async makeGoogleCalendarRequest({ + endpoint, + method = 'GET', + body, + params + }: { + endpoint: string + method?: string + body?: any + params?: any + }): Promise<string> { + const url = `https://www.googleapis.com/calendar/v3/${endpoint}` + + const headers = { + Authorization: `Bearer ${this.accessToken}`, + 'Content-Type': 'application/json', + Accept: 'application/json', + ...this.headers + } + + const response = await fetch(url, { + method, + headers, + body: body ? JSON.stringify(body) : undefined + }) + + if (!response.ok) { + const errorText = await response.text() + throw new Error(`Google Calendar API Error ${response.status}: ${response.statusText} - ${errorText}`) + } + + const data = await response.text() + return data + TOOL_ARGS_PREFIX + JSON.stringify(params) + } +} + +// Event Tools +class ListEventsTool extends BaseGoogleCalendarTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'list_events', + description: 'List events from Google Calendar', + schema: ListEventsSchema, + baseUrl: '', + method: 'GET', + headers: {} + } + super({ + ...toolInput, + accessToken: args.accessToken + }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise<string> { + const params = { ...arg, ...this.defaultParams } + const queryParams = new URLSearchParams() + + if (params.timeMin) queryParams.append('timeMin', params.timeMin) + if (params.timeMax) queryParams.append('timeMax', params.timeMax) + if (params.maxResults) queryParams.append('maxResults', params.maxResults.toString()) + if (params.singleEvents !== undefined) queryParams.append('singleEvents', params.singleEvents.toString()) + if (params.orderBy) queryParams.append('orderBy', params.orderBy) + if (params.query) queryParams.append('q', params.query) + + const endpoint = `calendars/${encodeURIComponent(params.calendarId)}/events?${queryParams.toString()}` + + try { + const response = await this.makeGoogleCalendarRequest({ endpoint, params }) + 
return response + } catch (error) { + return formatToolError(`Error listing events: ${error}`, params) + } + } +} + +class CreateEventTool extends BaseGoogleCalendarTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'create_event', + description: 'Create a new event in Google Calendar', + schema: CreateEventSchema, + baseUrl: '', + method: 'POST', + headers: {} + } + super({ + ...toolInput, + accessToken: args.accessToken + }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + + try { + const eventData: any = { + summary: params.summary + } + + if (params.description) eventData.description = params.description + if (params.location) eventData.location = params.location + + // Handle date/time + if (params.startDate && params.endDate) { + // All-day event + eventData.start = { date: params.startDate } + eventData.end = { date: params.endDate } + } else if (params.startDateTime && params.endDateTime) { + // Timed event + eventData.start = { + dateTime: params.startDateTime, + timeZone: params.timeZone || 'UTC' + } + eventData.end = { + dateTime: params.endDateTime, + timeZone: params.timeZone || 'UTC' + } + } + + // Handle attendees + if (params.attendees) { + eventData.attendees = params.attendees.split(',').map((email: string) => ({ + email: email.trim() + })) + } + + // Handle recurrence + if (params.recurrence) { + eventData.recurrence = [params.recurrence] + } + + // Handle reminders + if (params.reminderMinutes !== undefined) { + eventData.reminders = { + useDefault: false, + overrides: [ + { + method: 'popup', + minutes: params.reminderMinutes + } + ] + } + } + + if (params.visibility) eventData.visibility = params.visibility + const queryParams = new URLSearchParams() + if (params.sendUpdates) queryParams.append('sendUpdates', params.sendUpdates) + + const endpoint = 
`calendars/${encodeURIComponent(params.calendarId)}/events?${queryParams.toString()}` + + const response = await this.makeGoogleCalendarRequest({ endpoint, method: 'POST', body: eventData, params }) + return response + } catch (error) { + return formatToolError(`Error creating event: ${error}`, params) + } + } +} + +class GetEventTool extends BaseGoogleCalendarTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'get_event', + description: 'Get a specific event from Google Calendar', + schema: GetEventSchema, + baseUrl: '', + method: 'GET', + headers: {} + } + super({ + ...toolInput, + accessToken: args.accessToken + }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + + try { + const endpoint = `calendars/${encodeURIComponent(params.calendarId)}/events/${encodeURIComponent(params.eventId)}` + const response = await this.makeGoogleCalendarRequest({ endpoint, params }) + return response + } catch (error) { + return formatToolError(`Error getting event: ${error}`, params) + } + } +} + +class UpdateEventTool extends BaseGoogleCalendarTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'update_event', + description: 'Update an existing event in Google Calendar', + schema: UpdateEventSchema, + baseUrl: '', + method: 'PUT', + headers: {} + } + super({ + ...toolInput, + accessToken: args.accessToken + }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + + try { + const updateData: any = {} + + if (params.summary) updateData.summary = params.summary + if (params.description) updateData.description = params.description + if (params.location) updateData.location = params.location + + // Handle date/time updates + if (params.startDate && params.endDate) { + updateData.start = { date: params.startDate } + updateData.end = { 
date: params.endDate } + } else if (params.startDateTime && params.endDateTime) { + updateData.start = { + dateTime: params.startDateTime, + timeZone: params.timeZone || 'UTC' + } + updateData.end = { + dateTime: params.endDateTime, + timeZone: params.timeZone || 'UTC' + } + } + + if (params.attendees) { + updateData.attendees = params.attendees.split(',').map((email: string) => ({ + email: email.trim() + })) + } + + if (params.recurrence) { + updateData.recurrence = [params.recurrence] + } + + if (params.reminderMinutes !== undefined) { + updateData.reminders = { + useDefault: false, + overrides: [ + { + method: 'popup', + minutes: params.reminderMinutes + } + ] + } + } + + if (params.visibility) updateData.visibility = params.visibility + const queryParams = new URLSearchParams() + if (params.sendUpdates) queryParams.append('sendUpdates', params.sendUpdates) + + const endpoint = `calendars/${encodeURIComponent(params.calendarId)}/events/${encodeURIComponent( + params.eventId + )}?${queryParams.toString()}` + const response = await this.makeGoogleCalendarRequest({ endpoint, method: 'PUT', body: updateData, params }) + return response + } catch (error) { + return formatToolError(`Error updating event: ${error}`, params) + } + } +} + +class DeleteEventTool extends BaseGoogleCalendarTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'delete_event', + description: 'Delete an event from Google Calendar', + schema: DeleteEventSchema, + baseUrl: '', + method: 'DELETE', + headers: {} + } + super({ + ...toolInput, + accessToken: args.accessToken + }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + + try { + const endpoint = `calendars/${encodeURIComponent(params.calendarId)}/events/${encodeURIComponent(params.eventId)}` + const response = await this.makeGoogleCalendarRequest({ endpoint, method: 'DELETE', params }) + return response || 'Event 
deleted successfully' + } catch (error) { + return formatToolError(`Error deleting event: ${error}`, params) + } + } +} + +class QuickAddEventTool extends BaseGoogleCalendarTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'quick_add_event', + description: 'Quick add event to Google Calendar using natural language', + schema: QuickAddEventSchema, + baseUrl: '', + method: 'POST', + headers: {} + } + super({ + ...toolInput, + accessToken: args.accessToken + }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + + try { + const queryParams = new URLSearchParams() + queryParams.append('text', params.quickAddText) + + const endpoint = `calendars/${encodeURIComponent(params.calendarId)}/events/quickAdd?${queryParams.toString()}` + const response = await this.makeGoogleCalendarRequest({ endpoint, method: 'POST', params }) + return response + } catch (error) { + return formatToolError(`Error quick adding event: ${error}`, params) + } + } +} + +// Calendar Tools +class ListCalendarsTool extends BaseGoogleCalendarTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'list_calendars', + description: 'List calendars from Google Calendar', + schema: ListCalendarsSchema, + baseUrl: '', + method: 'GET', + headers: {} + } + super({ + ...toolInput, + accessToken: args.accessToken + }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const queryParams = new URLSearchParams() + + if (params.showHidden !== undefined) queryParams.append('showHidden', params.showHidden.toString()) + if (params.minAccessRole) queryParams.append('minAccessRole', params.minAccessRole) + + const endpoint = `users/me/calendarList?${queryParams.toString()}` + + try { + const response = await this.makeGoogleCalendarRequest({ endpoint, params }) + return response 
+ } catch (error) { + return formatToolError(`Error listing calendars: ${error}`, params) + } + } +} + +class CreateCalendarTool extends BaseGoogleCalendarTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'create_calendar', + description: 'Create a new calendar in Google Calendar', + schema: CreateCalendarSchema, + baseUrl: '', + method: 'POST', + headers: {} + } + super({ + ...toolInput, + accessToken: args.accessToken + }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + + try { + const calendarData: any = { + summary: params.summary + } + + if (params.description) calendarData.description = params.description + if (params.location) calendarData.location = params.location + if (params.timeZone) calendarData.timeZone = params.timeZone + + const endpoint = 'calendars' + const response = await this.makeGoogleCalendarRequest({ endpoint, method: 'POST', body: calendarData, params }) + return response + } catch (error) { + return formatToolError(`Error creating calendar: ${error}`, params) + } + } +} + +class GetCalendarTool extends BaseGoogleCalendarTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'get_calendar', + description: 'Get a specific calendar from Google Calendar', + schema: GetCalendarSchema, + baseUrl: '', + method: 'GET', + headers: {} + } + super({ + ...toolInput, + accessToken: args.accessToken + }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + + try { + const endpoint = `calendars/${encodeURIComponent(params.calendarId)}` + const response = await this.makeGoogleCalendarRequest({ endpoint, params }) + return response + } catch (error) { + return formatToolError(`Error getting calendar: ${error}`, params) + } + } +} + +class UpdateCalendarTool extends BaseGoogleCalendarTool { + defaultParams: 
any + + constructor(args: any) { + const toolInput = { + name: 'update_calendar', + description: 'Update an existing calendar in Google Calendar', + schema: UpdateCalendarSchema, + baseUrl: '', + method: 'PUT', + headers: {} + } + super({ + ...toolInput, + accessToken: args.accessToken + }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + + try { + const updateData: any = {} + + if (params.summary) updateData.summary = params.summary + if (params.description) updateData.description = params.description + if (params.location) updateData.location = params.location + if (params.timeZone) updateData.timeZone = params.timeZone + + const endpoint = `calendars/${encodeURIComponent(params.calendarId)}` + const response = await this.makeGoogleCalendarRequest({ endpoint, method: 'PUT', body: updateData, params }) + return response + } catch (error) { + return formatToolError(`Error updating calendar: ${error}`, params) + } + } +} + +class DeleteCalendarTool extends BaseGoogleCalendarTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'delete_calendar', + description: 'Delete a calendar from Google Calendar', + schema: DeleteCalendarSchema, + baseUrl: '', + method: 'DELETE', + headers: {} + } + super({ + ...toolInput, + accessToken: args.accessToken + }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + + try { + const endpoint = `calendars/${encodeURIComponent(params.calendarId)}` + const response = await this.makeGoogleCalendarRequest({ endpoint, method: 'DELETE', params }) + return response || 'Calendar deleted successfully' + } catch (error) { + return formatToolError(`Error deleting calendar: ${error}`, params) + } + } +} + +class ClearCalendarTool extends BaseGoogleCalendarTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + 
name: 'clear_calendar', + description: 'Clear all events from a Google Calendar', + schema: ClearCalendarSchema, + baseUrl: '', + method: 'POST', + headers: {} + } + super({ + ...toolInput, + accessToken: args.accessToken + }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + + try { + const endpoint = `calendars/${encodeURIComponent(params.calendarId)}/clear` + const response = await this.makeGoogleCalendarRequest({ endpoint, method: 'POST', params }) + return response || 'Calendar cleared successfully' + } catch (error) { + return formatToolError(`Error clearing calendar: ${error}`, params) + } + } +} + +// Freebusy Tools +class QueryFreebusyTool extends BaseGoogleCalendarTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'query_freebusy', + description: 'Query free/busy information for a set of calendars', + schema: QueryFreebusySchema, + baseUrl: '', + method: 'POST', + headers: {} + } + super({ + ...toolInput, + accessToken: args.accessToken + }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + + try { + const freebusyData: any = { + timeMin: params.timeMin, + timeMax: params.timeMax, + items: params.calendarIds.split(',').map((id: string) => ({ + id: id.trim() + })) + } + + if (params.groupExpansionMax !== undefined) { + freebusyData.groupExpansionMax = params.groupExpansionMax + } + + if (params.calendarExpansionMax !== undefined) { + freebusyData.calendarExpansionMax = params.calendarExpansionMax + } + + const endpoint = 'freeBusy' + const response = await this.makeGoogleCalendarRequest({ endpoint, method: 'POST', body: freebusyData, params }) + return response + } catch (error) { + return formatToolError(`Error querying freebusy: ${error}`, params) + } + } +} + +export const createGoogleCalendarTools = (args?: RequestParameters): 
DynamicStructuredTool[] => { + const tools: DynamicStructuredTool[] = [] + const actions = args?.actions || [] + const accessToken = args?.accessToken || '' + const defaultParams = args?.defaultParams || {} + + // Event tools + if (actions.includes('listEvents')) { + tools.push(new ListEventsTool({ accessToken, defaultParams })) + } + + if (actions.includes('createEvent')) { + tools.push(new CreateEventTool({ accessToken, defaultParams })) + } + + if (actions.includes('getEvent')) { + tools.push(new GetEventTool({ accessToken, defaultParams })) + } + + if (actions.includes('updateEvent')) { + tools.push(new UpdateEventTool({ accessToken, defaultParams })) + } + + if (actions.includes('deleteEvent')) { + tools.push(new DeleteEventTool({ accessToken, defaultParams })) + } + + if (actions.includes('quickAddEvent')) { + tools.push(new QuickAddEventTool({ accessToken, defaultParams })) + } + + // Calendar tools + if (actions.includes('listCalendars')) { + tools.push(new ListCalendarsTool({ accessToken, defaultParams })) + } + + if (actions.includes('createCalendar')) { + tools.push(new CreateCalendarTool({ accessToken, defaultParams })) + } + + if (actions.includes('getCalendar')) { + tools.push(new GetCalendarTool({ accessToken, defaultParams })) + } + + if (actions.includes('updateCalendar')) { + tools.push(new UpdateCalendarTool({ accessToken, defaultParams })) + } + + if (actions.includes('deleteCalendar')) { + tools.push(new DeleteCalendarTool({ accessToken, defaultParams })) + } + + if (actions.includes('clearCalendar')) { + tools.push(new ClearCalendarTool({ accessToken, defaultParams })) + } + + // Freebusy tools + if (actions.includes('queryFreebusy')) { + tools.push(new QueryFreebusyTool({ accessToken, defaultParams })) + } + + return tools +} diff --git a/packages/components/nodes/tools/GoogleCalendar/google-calendar.svg b/packages/components/nodes/tools/GoogleCalendar/google-calendar.svg new file mode 100644 index 000000000..c5ba2d56f --- /dev/null +++ 
b/packages/components/nodes/tools/GoogleCalendar/google-calendar.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/packages/components/nodes/tools/GoogleDocs/GoogleDocs.ts b/packages/components/nodes/tools/GoogleDocs/GoogleDocs.ts new file mode 100644 index 000000000..296a9dea8 --- /dev/null +++ b/packages/components/nodes/tools/GoogleDocs/GoogleDocs.ts @@ -0,0 +1,253 @@ +import { convertMultiOptionsToStringArray, getCredentialData, getCredentialParam, refreshOAuth2Token } from '../../../src/utils' +import { createGoogleDocsTools } from './core' +import type { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface' + +class GoogleDocs_Tools implements INode { + label: string + name: string + version: number + type: string + icon: string + category: string + description: string + baseClasses: string[] + credential: INodeParams + inputs: INodeParams[] + + constructor() { + this.label = 'Google Docs' + this.name = 'googleDocsTool' + this.version = 1.0 + this.type = 'GoogleDocs' + this.icon = 'google-docs.svg' + this.category = 'Tools' + this.description = + 'Perform Google Docs operations such as creating, reading, updating, and deleting documents, as well as text manipulation' + this.baseClasses = ['Tool'] + this.credential = { + label: 'Connect Credential', + name: 'credential', + type: 'credential', + credentialNames: ['googleDocsOAuth2'] + } + this.inputs = [ + // Document Actions + { + label: 'Actions', + name: 'actions', + type: 'multiOptions', + description: 'Actions to perform', + options: [ + { + label: 'Create Document', + name: 'createDocument' + }, + { + label: 'Get Document', + name: 'getDocument' + }, + { + label: 'Update Document', + name: 'updateDocument' + }, + { + label: 'Insert Text', + name: 'insertText' + }, + { + label: 'Replace Text', + name: 'replaceText' + }, + { + label: 'Append Text', + name: 'appendText' + }, + { + label: 'Get Text Content', + name: 'getTextContent' + }, + { + label: 'Insert Image', + name: 
'insertImage' + }, + { + label: 'Create Table', + name: 'createTable' + } + ] + }, + // Document Parameters + { + label: 'Document ID', + name: 'documentId', + type: 'string', + description: 'Document ID for operations on specific documents', + show: { + actions: [ + 'getDocument', + 'updateDocument', + 'insertText', + 'replaceText', + 'appendText', + 'getTextContent', + 'insertImage', + 'createTable' + ] + }, + additionalParams: true, + optional: true + }, + { + label: 'Title', + name: 'title', + type: 'string', + description: 'Document title', + show: { + actions: ['createDocument'] + }, + additionalParams: true, + optional: true + }, + // Text Parameters + { + label: 'Text', + name: 'text', + type: 'string', + description: 'Text content to insert or append', + show: { + actions: ['createDocument', 'updateDocument', 'insertText', 'appendText'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Index', + name: 'index', + type: 'number', + description: 'Index where to insert text or media (1-based, default: 1 for beginning)', + default: 1, + show: { + actions: ['createDocument', 'updateDocument', 'insertText', 'insertImage', 'createTable'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Replace Text', + name: 'replaceText', + type: 'string', + description: 'Text to replace', + show: { + actions: ['updateDocument', 'replaceText'] + }, + additionalParams: true, + optional: true + }, + { + label: 'New Text', + name: 'newText', + type: 'string', + description: 'New text to replace with', + show: { + actions: ['updateDocument', 'replaceText'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Match Case', + name: 'matchCase', + type: 'boolean', + description: 'Whether the search should be case-sensitive', + default: false, + show: { + actions: ['updateDocument', 'replaceText'] + }, + additionalParams: true, + optional: true + }, + + // Media Parameters + { + label: 'Image URL', + name: 'imageUrl', + type: 'string', + 
description: 'URL of the image to insert', + show: { + actions: ['createDocument', 'updateDocument', 'insertImage'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Table Rows', + name: 'rows', + type: 'number', + description: 'Number of rows in the table', + show: { + actions: ['createDocument', 'updateDocument', 'createTable'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Table Columns', + name: 'columns', + type: 'number', + description: 'Number of columns in the table', + show: { + actions: ['createDocument', 'updateDocument', 'createTable'] + }, + additionalParams: true, + optional: true + } + ] + } + + async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { + let credentialData = await getCredentialData(nodeData.credential ?? '', options) + credentialData = await refreshOAuth2Token(nodeData.credential ?? '', credentialData, options) + const accessToken = getCredentialParam('access_token', credentialData, nodeData) + + if (!accessToken) { + throw new Error('No access token found in credential') + } + + // Get all actions + const actions = convertMultiOptionsToStringArray(nodeData.inputs?.actions) + + const defaultParams = this.transformNodeInputsToToolArgs(nodeData) + + const tools = createGoogleDocsTools({ + accessToken, + actions, + defaultParams + }) + + return tools + } + + transformNodeInputsToToolArgs(nodeData: INodeData): Record { + const nodeInputs: Record = {} + + // Document parameters + if (nodeData.inputs?.documentId) nodeInputs.documentId = nodeData.inputs.documentId + if (nodeData.inputs?.title) nodeInputs.title = nodeData.inputs.title + + // Text parameters + if (nodeData.inputs?.text) nodeInputs.text = nodeData.inputs.text + if (nodeData.inputs?.index) nodeInputs.index = nodeData.inputs.index + if (nodeData.inputs?.replaceText) nodeInputs.replaceText = nodeData.inputs.replaceText + if (nodeData.inputs?.newText) nodeInputs.newText = nodeData.inputs.newText + if 
(nodeData.inputs?.matchCase !== undefined) nodeInputs.matchCase = nodeData.inputs.matchCase + + // Media parameters + if (nodeData.inputs?.imageUrl) nodeInputs.imageUrl = nodeData.inputs.imageUrl + if (nodeData.inputs?.rows) nodeInputs.rows = nodeData.inputs.rows + if (nodeData.inputs?.columns) nodeInputs.columns = nodeData.inputs.columns + + return nodeInputs + } +} + +module.exports = { nodeClass: GoogleDocs_Tools } diff --git a/packages/components/nodes/tools/GoogleDocs/core.ts b/packages/components/nodes/tools/GoogleDocs/core.ts new file mode 100644 index 000000000..51cfa6f8f --- /dev/null +++ b/packages/components/nodes/tools/GoogleDocs/core.ts @@ -0,0 +1,729 @@ +import { z } from 'zod' +import fetch from 'node-fetch' +import { DynamicStructuredTool } from '../OpenAPIToolkit/core' +import { TOOL_ARGS_PREFIX, formatToolError } from '../../../src/agents' + +export const desc = `Use this when you want to access Google Docs API for managing documents` + +export interface Headers { + [key: string]: string +} + +export interface Body { + [key: string]: any +} + +export interface RequestParameters { + headers?: Headers + body?: Body + url?: string + description?: string + name?: string + actions?: string[] + accessToken?: string + defaultParams?: any +} + +// Define schemas for different Google Docs operations + +// Document Schemas +const CreateDocumentSchema = z.object({ + title: z.string().describe('Document title'), + text: z.string().optional().describe('Text content to insert after creating document'), + index: z.number().optional().default(1).describe('Index where to insert text or media (1-based, default: 1 for beginning)'), + imageUrl: z.string().optional().describe('URL of the image to insert after creating document'), + rows: z.number().optional().describe('Number of rows in the table to create'), + columns: z.number().optional().describe('Number of columns in the table to create') +}) + +const GetDocumentSchema = z.object({ + documentId: 
z.string().describe('Document ID to retrieve') +}) + +const UpdateDocumentSchema = z.object({ + documentId: z.string().describe('Document ID to update'), + text: z.string().optional().describe('Text content to insert'), + index: z.number().optional().default(1).describe('Index where to insert text or media (1-based, default: 1 for beginning)'), + replaceText: z.string().optional().describe('Text to replace'), + newText: z.string().optional().describe('New text to replace with'), + matchCase: z.boolean().optional().default(false).describe('Whether the search should be case-sensitive'), + imageUrl: z.string().optional().describe('URL of the image to insert'), + rows: z.number().optional().describe('Number of rows in the table to create'), + columns: z.number().optional().describe('Number of columns in the table to create') +}) + +const InsertTextSchema = z.object({ + documentId: z.string().describe('Document ID'), + text: z.string().describe('Text to insert'), + index: z.number().optional().default(1).describe('Index where to insert text (1-based, default: 1 for beginning)') +}) + +const ReplaceTextSchema = z.object({ + documentId: z.string().describe('Document ID'), + replaceText: z.string().describe('Text to replace'), + newText: z.string().describe('New text to replace with'), + matchCase: z.boolean().optional().default(false).describe('Whether the search should be case-sensitive') +}) + +const AppendTextSchema = z.object({ + documentId: z.string().describe('Document ID'), + text: z.string().describe('Text to append to the document') +}) + +const GetTextContentSchema = z.object({ + documentId: z.string().describe('Document ID to get text content from') +}) + +const InsertImageSchema = z.object({ + documentId: z.string().describe('Document ID'), + imageUrl: z.string().describe('URL of the image to insert'), + index: z.number().optional().default(1).describe('Index where to insert image (1-based)') +}) + +const CreateTableSchema = z.object({ + documentId: 
z.string().describe('Document ID'), + rows: z.number().describe('Number of rows in the table'), + columns: z.number().describe('Number of columns in the table'), + index: z.number().optional().default(1).describe('Index where to insert table (1-based)') +}) + +class BaseGoogleDocsTool extends DynamicStructuredTool { + protected accessToken: string = '' + + constructor(args: any) { + super(args) + this.accessToken = args.accessToken ?? '' + } + + async makeGoogleDocsRequest({ + endpoint, + method = 'GET', + body, + params + }: { + endpoint: string + method?: string + body?: any + params?: any + }): Promise { + const url = `https://docs.googleapis.com/v1/${endpoint}` + + const headers = { + Authorization: `Bearer ${this.accessToken}`, + 'Content-Type': 'application/json', + Accept: 'application/json', + ...this.headers + } + + const response = await fetch(url, { + method, + headers, + body: body ? JSON.stringify(body) : undefined + }) + + if (!response.ok) { + const errorText = await response.text() + throw new Error(`Google Docs API Error ${response.status}: ${response.statusText} - ${errorText}`) + } + + const data = await response.text() + return data + TOOL_ARGS_PREFIX + JSON.stringify(params) + } + + async makeDriveRequest({ + endpoint, + method = 'GET', + body, + params + }: { + endpoint: string + method?: string + body?: any + params?: any + }): Promise { + const url = `https://www.googleapis.com/drive/v3/${endpoint}` + + const headers = { + Authorization: `Bearer ${this.accessToken}`, + 'Content-Type': 'application/json', + Accept: 'application/json', + ...this.headers + } + + const response = await fetch(url, { + method, + headers, + body: body ? 
JSON.stringify(body) : undefined + }) + + if (!response.ok) { + const errorText = await response.text() + throw new Error(`Google Drive API Error ${response.status}: ${response.statusText} - ${errorText}`) + } + + const data = await response.text() + return data + TOOL_ARGS_PREFIX + JSON.stringify(params) + } +} + +// Document Tools +class CreateDocumentTool extends BaseGoogleDocsTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'create_document', + description: 'Create a new Google Docs document', + schema: CreateDocumentSchema, + baseUrl: '', + method: 'POST', + headers: {} + } + super({ + ...toolInput, + accessToken: args.accessToken + }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + + try { + const documentData = { + title: params.title + } + + const endpoint = 'documents' + const createResponse = await this.makeGoogleDocsRequest({ + endpoint, + method: 'POST', + body: documentData, + params + }) + + // Get the document ID from the response + const documentResponse = JSON.parse(createResponse.split(TOOL_ARGS_PREFIX)[0]) + const documentId = documentResponse.documentId + + // Now add content if provided + const requests = [] + + if (params.text) { + requests.push({ + insertText: { + location: { + index: params.index || 1 + }, + text: params.text + } + }) + } + + if (params.imageUrl) { + requests.push({ + insertInlineImage: { + location: { + index: params.index || 1 + }, + uri: params.imageUrl + } + }) + } + + if (params.rows && params.columns) { + requests.push({ + insertTable: { + location: { + index: params.index || 1 + }, + rows: params.rows, + columns: params.columns + } + }) + } + + // If we have content to add, make a batch update + if (requests.length > 0) { + const updateEndpoint = `documents/${encodeURIComponent(documentId)}:batchUpdate` + await this.makeGoogleDocsRequest({ + endpoint: updateEndpoint, + method: 'POST', + 
body: { requests }, + params: {} + }) + } + + return createResponse + } catch (error) { + return formatToolError(`Error creating document: ${error}`, params) + } + } +} + +class GetDocumentTool extends BaseGoogleDocsTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'get_document', + description: 'Get a Google Docs document by ID', + schema: GetDocumentSchema, + baseUrl: '', + method: 'GET', + headers: {} + } + super({ + ...toolInput, + accessToken: args.accessToken + }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + + try { + const endpoint = `documents/${encodeURIComponent(params.documentId)}` + const response = await this.makeGoogleDocsRequest({ endpoint, params }) + return response + } catch (error) { + return formatToolError(`Error getting document: ${error}`, params) + } + } +} + +class UpdateDocumentTool extends BaseGoogleDocsTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'update_document', + description: 'Update a Google Docs document with batch requests', + schema: UpdateDocumentSchema, + baseUrl: '', + method: 'POST', + headers: {} + } + super({ + ...toolInput, + accessToken: args.accessToken + }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + + try { + const requests = [] + + // Insert text + if (params.text) { + requests.push({ + insertText: { + location: { + index: params.index || 1 + }, + text: params.text + } + }) + } + + // Replace text + if (params.replaceText && params.newText) { + requests.push({ + replaceAllText: { + containsText: { + text: params.replaceText, + matchCase: params.matchCase || false + }, + replaceText: params.newText + } + }) + } + + // Insert image + if (params.imageUrl) { + requests.push({ + insertInlineImage: { + location: { + index: params.index || 1 + }, + uri: 
params.imageUrl + } + }) + } + + // Create table + if (params.rows && params.columns) { + requests.push({ + insertTable: { + location: { + index: params.index || 1 + }, + rows: params.rows, + columns: params.columns + } + }) + } + + if (requests.length > 0) { + const endpoint = `documents/${encodeURIComponent(params.documentId)}:batchUpdate` + const response = await this.makeGoogleDocsRequest({ + endpoint, + method: 'POST', + body: { requests }, + params + }) + return response + } else { + return `No updates specified` + TOOL_ARGS_PREFIX + JSON.stringify(params) + } + } catch (error) { + return formatToolError(`Error updating document: ${error}`, params) + } + } +} + +class InsertTextTool extends BaseGoogleDocsTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'insert_text', + description: 'Insert text into a Google Docs document', + schema: InsertTextSchema, + baseUrl: '', + method: 'POST', + headers: {} + } + super({ + ...toolInput, + accessToken: args.accessToken + }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + + try { + const requests = [ + { + insertText: { + location: { + index: params.index + }, + text: params.text + } + } + ] + + const endpoint = `documents/${encodeURIComponent(params.documentId)}:batchUpdate` + const response = await this.makeGoogleDocsRequest({ + endpoint, + method: 'POST', + body: { requests }, + params + }) + return response + } catch (error) { + return formatToolError(`Error inserting text: ${error}`, params) + } + } +} + +class ReplaceTextTool extends BaseGoogleDocsTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'replace_text', + description: 'Replace text in a Google Docs document', + schema: ReplaceTextSchema, + baseUrl: '', + method: 'POST', + headers: {} + } + super({ + ...toolInput, + accessToken: args.accessToken + }) + this.defaultParams = args.defaultParams 
|| {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + + try { + const requests = [ + { + replaceAllText: { + containsText: { + text: params.replaceText, + matchCase: params.matchCase + }, + replaceText: params.newText + } + } + ] + + const endpoint = `documents/${encodeURIComponent(params.documentId)}:batchUpdate` + const response = await this.makeGoogleDocsRequest({ + endpoint, + method: 'POST', + body: { requests }, + params + }) + return response + } catch (error) { + return formatToolError(`Error replacing text: ${error}`, params) + } + } +} + +class AppendTextTool extends BaseGoogleDocsTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'append_text', + description: 'Append text to the end of a Google Docs document', + schema: AppendTextSchema, + baseUrl: '', + method: 'POST', + headers: {} + } + super({ + ...toolInput, + accessToken: args.accessToken + }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + + try { + // First get the document to find the end index + const getEndpoint = `documents/${encodeURIComponent(params.documentId)}` + const docResponse = await this.makeGoogleDocsRequest({ endpoint: getEndpoint, params: {} }) + const docData = JSON.parse(docResponse.split(TOOL_ARGS_PREFIX)[0]) + + // Get the end index of the document body + const endIndex = docData.body.content[docData.body.content.length - 1].endIndex - 1 + + const requests = [ + { + insertText: { + location: { + index: endIndex + }, + text: params.text + } + } + ] + + const endpoint = `documents/${encodeURIComponent(params.documentId)}:batchUpdate` + const response = await this.makeGoogleDocsRequest({ + endpoint, + method: 'POST', + body: { requests }, + params + }) + return response + } catch (error) { + return formatToolError(`Error appending text: ${error}`, params) + } + } +} + +class GetTextContentTool extends 
BaseGoogleDocsTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'get_text_content', + description: 'Get the text content from a Google Docs document', + schema: GetTextContentSchema, + baseUrl: '', + method: 'GET', + headers: {} + } + super({ + ...toolInput, + accessToken: args.accessToken + }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + + try { + const endpoint = `documents/${encodeURIComponent(params.documentId)}` + const response = await this.makeGoogleDocsRequest({ endpoint, params }) + + // Extract and return just the text content + const docData = JSON.parse(response.split(TOOL_ARGS_PREFIX)[0]) + let textContent = '' + + const extractText = (element: any) => { + if (element.paragraph) { + element.paragraph.elements?.forEach((elem: any) => { + if (elem.textRun) { + textContent += elem.textRun.content + } + }) + } + } + + docData.body.content?.forEach(extractText) + + return JSON.stringify({ textContent }) + TOOL_ARGS_PREFIX + JSON.stringify(params) + } catch (error) { + return formatToolError(`Error getting text content: ${error}`, params) + } + } +} + +class InsertImageTool extends BaseGoogleDocsTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'insert_image', + description: 'Insert an image into a Google Docs document', + schema: InsertImageSchema, + baseUrl: '', + method: 'POST', + headers: {} + } + super({ + ...toolInput, + accessToken: args.accessToken + }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + + try { + const requests = [ + { + insertInlineImage: { + location: { + index: params.index + }, + uri: params.imageUrl + } + } + ] + + const endpoint = `documents/${encodeURIComponent(params.documentId)}:batchUpdate` + const response = await this.makeGoogleDocsRequest({ + endpoint, + method: 
'POST', + body: { requests }, + params + }) + return response + } catch (error) { + return formatToolError(`Error inserting image: ${error}`, params) + } + } +} + +class CreateTableTool extends BaseGoogleDocsTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'create_table', + description: 'Create a table in a Google Docs document', + schema: CreateTableSchema, + baseUrl: '', + method: 'POST', + headers: {} + } + super({ + ...toolInput, + accessToken: args.accessToken + }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + + try { + const requests = [ + { + insertTable: { + location: { + index: params.index + }, + rows: params.rows, + columns: params.columns + } + } + ] + + const endpoint = `documents/${encodeURIComponent(params.documentId)}:batchUpdate` + const response = await this.makeGoogleDocsRequest({ + endpoint, + method: 'POST', + body: { requests }, + params + }) + return response + } catch (error) { + return formatToolError(`Error creating table: ${error}`, params) + } + } +} + +export const createGoogleDocsTools = (args?: RequestParameters): DynamicStructuredTool[] => { + const actions = args?.actions || [] + const tools: DynamicStructuredTool[] = [] + + if (actions.includes('createDocument') || actions.length === 0) { + tools.push(new CreateDocumentTool(args)) + } + + if (actions.includes('getDocument') || actions.length === 0) { + tools.push(new GetDocumentTool(args)) + } + + if (actions.includes('updateDocument') || actions.length === 0) { + tools.push(new UpdateDocumentTool(args)) + } + + if (actions.includes('insertText') || actions.length === 0) { + tools.push(new InsertTextTool(args)) + } + + if (actions.includes('replaceText') || actions.length === 0) { + tools.push(new ReplaceTextTool(args)) + } + + if (actions.includes('appendText') || actions.length === 0) { + tools.push(new AppendTextTool(args)) + } + + if 
(actions.includes('getTextContent') || actions.length === 0) { + tools.push(new GetTextContentTool(args)) + } + + if (actions.includes('insertImage') || actions.length === 0) { + tools.push(new InsertImageTool(args)) + } + + if (actions.includes('createTable') || actions.length === 0) { + tools.push(new CreateTableTool(args)) + } + + return tools +} diff --git a/packages/components/nodes/tools/GoogleDocs/google-docs.svg b/packages/components/nodes/tools/GoogleDocs/google-docs.svg new file mode 100644 index 000000000..7406241a1 --- /dev/null +++ b/packages/components/nodes/tools/GoogleDocs/google-docs.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/packages/components/nodes/tools/GoogleDrive/GoogleDrive.ts b/packages/components/nodes/tools/GoogleDrive/GoogleDrive.ts new file mode 100644 index 000000000..ec44367df --- /dev/null +++ b/packages/components/nodes/tools/GoogleDrive/GoogleDrive.ts @@ -0,0 +1,663 @@ +import { convertMultiOptionsToStringArray, getCredentialData, getCredentialParam, refreshOAuth2Token } from '../../../src/utils' +import { createGoogleDriveTools } from './core' +import type { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface' + +class GoogleDrive_Tools implements INode { + label: string + name: string + version: number + type: string + icon: string + category: string + description: string + baseClasses: string[] + credential: INodeParams + inputs: INodeParams[] + + constructor() { + this.label = 'Google Drive' + this.name = 'googleDriveTool' + this.version = 1.0 + this.type = 'GoogleDrive' + this.icon = 'google-drive.svg' + this.category = 'Tools' + this.description = 'Perform Google Drive operations such as managing files, folders, sharing, and searching' + this.baseClasses = ['Tool'] + this.credential = { + label: 'Connect Credential', + name: 'credential', + type: 'credential', + credentialNames: ['googleDriveOAuth2'] + } + this.inputs = [ + { + label: 'Type', + name: 'driveType', + type: 'options', + 
description: 'Type of Google Drive operation', + options: [ + { + label: 'File', + name: 'file' + }, + { + label: 'Folder', + name: 'folder' + }, + { + label: 'Search', + name: 'search' + }, + { + label: 'Share', + name: 'share' + } + ] + }, + // File Actions + { + label: 'File Actions', + name: 'fileActions', + type: 'multiOptions', + description: 'Actions to perform on files', + options: [ + { + label: 'List Files', + name: 'listFiles' + }, + { + label: 'Get File', + name: 'getFile' + }, + { + label: 'Create File', + name: 'createFile' + }, + { + label: 'Update File', + name: 'updateFile' + }, + { + label: 'Delete File', + name: 'deleteFile' + }, + { + label: 'Copy File', + name: 'copyFile' + }, + { + label: 'Download File', + name: 'downloadFile' + } + ], + show: { + driveType: ['file'] + } + }, + // Folder Actions + { + label: 'Folder Actions', + name: 'folderActions', + type: 'multiOptions', + description: 'Actions to perform on folders', + options: [ + { + label: 'Create Folder', + name: 'createFolder' + }, + { + label: 'List Folder Contents', + name: 'listFolderContents' + }, + { + label: 'Delete Folder', + name: 'deleteFolder' + } + ], + show: { + driveType: ['folder'] + } + }, + // Search Actions + { + label: 'Search Actions', + name: 'searchActions', + type: 'multiOptions', + description: 'Search operations', + options: [ + { + label: 'Search Files', + name: 'searchFiles' + } + ], + show: { + driveType: ['search'] + } + }, + // Share Actions + { + label: 'Share Actions', + name: 'shareActions', + type: 'multiOptions', + description: 'Sharing operations', + options: [ + { + label: 'Share File', + name: 'shareFile' + }, + { + label: 'Get Permissions', + name: 'getPermissions' + }, + { + label: 'Remove Permission', + name: 'removePermission' + } + ], + show: { + driveType: ['share'] + } + }, + // File Parameters + { + label: 'File ID', + name: 'fileId', + type: 'string', + description: 'File ID for file operations', + show: { + fileActions: ['getFile', 
'updateFile', 'deleteFile', 'copyFile', 'downloadFile'] + }, + additionalParams: true, + optional: true + }, + { + label: 'File ID', + name: 'fileId', + type: 'string', + description: 'File ID for sharing operations', + show: { + shareActions: ['shareFile', 'getPermissions', 'removePermission'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Folder ID', + name: 'folderId', + type: 'string', + description: 'Folder ID for folder operations', + show: { + folderActions: ['listFolderContents', 'deleteFolder'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Permission ID', + name: 'permissionId', + type: 'string', + description: 'Permission ID to remove', + show: { + shareActions: ['removePermission'] + }, + additionalParams: true, + optional: true + }, + { + label: 'File Name', + name: 'fileName', + type: 'string', + description: 'Name of the file', + show: { + fileActions: ['createFile', 'copyFile'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Folder Name', + name: 'fileName', + type: 'string', + description: 'Name of the folder', + show: { + folderActions: ['createFolder'] + }, + additionalParams: true, + optional: true + }, + { + label: 'File Content', + name: 'fileContent', + type: 'string', + description: 'Content of the file (for text files)', + show: { + fileActions: ['createFile'] + }, + additionalParams: true, + optional: true + }, + { + label: 'MIME Type', + name: 'mimeType', + type: 'string', + description: 'MIME type of the file (e.g., text/plain, application/pdf)', + show: { + fileActions: ['createFile'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Parent Folder ID', + name: 'parentFolderId', + type: 'string', + description: 'ID of the parent folder (comma-separated for multiple parents)', + show: { + fileActions: ['createFile', 'copyFile'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Parent Folder ID', + name: 'parentFolderId', + type: 'string', + 
description: 'ID of the parent folder for the new folder', + show: { + folderActions: ['createFolder'] + }, + additionalParams: true, + optional: true + }, + { + label: 'File Description', + name: 'description', + type: 'string', + description: 'File description', + show: { + fileActions: ['createFile', 'updateFile'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Folder Description', + name: 'description', + type: 'string', + description: 'Folder description', + show: { + folderActions: ['createFolder'] + }, + additionalParams: true, + optional: true + }, + // Search Parameters + { + label: 'Search Query', + name: 'searchQuery', + type: 'string', + description: 'Search query using Google Drive search syntax', + show: { + searchActions: ['searchFiles'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Max Results', + name: 'maxResults', + type: 'number', + description: 'Maximum number of results to return (1-1000)', + default: 10, + show: { + fileActions: ['listFiles'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Max Results', + name: 'maxResults', + type: 'number', + description: 'Maximum number of results to return (1-1000)', + default: 10, + show: { + searchActions: ['searchFiles'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Order By', + name: 'orderBy', + type: 'options', + description: 'Sort order for file results', + options: [ + { + label: 'Name', + name: 'name' + }, + { + label: 'Created Time', + name: 'createdTime' + }, + { + label: 'Modified Time', + name: 'modifiedTime' + }, + { + label: 'Size', + name: 'quotaBytesUsed' + }, + { + label: 'Folder', + name: 'folder' + } + ], + show: { + fileActions: ['listFiles'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Order By', + name: 'orderBy', + type: 'options', + description: 'Sort order for search results', + options: [ + { + label: 'Name', + name: 'name' + }, + { + label: 'Created Time', + name: 'createdTime' 
+ }, + { + label: 'Modified Time', + name: 'modifiedTime' + }, + { + label: 'Size', + name: 'quotaBytesUsed' + }, + { + label: 'Folder', + name: 'folder' + } + ], + show: { + searchActions: ['searchFiles'] + }, + additionalParams: true, + optional: true + }, + // Share Parameters + { + label: 'Share Role', + name: 'shareRole', + type: 'options', + description: 'Permission role for sharing', + options: [ + { + label: 'Reader', + name: 'reader' + }, + { + label: 'Writer', + name: 'writer' + }, + { + label: 'Commenter', + name: 'commenter' + }, + { + label: 'Owner', + name: 'owner' + } + ], + show: { + shareActions: ['shareFile'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Share Type', + name: 'shareType', + type: 'options', + description: 'Type of permission', + options: [ + { + label: 'User', + name: 'user' + }, + { + label: 'Group', + name: 'group' + }, + { + label: 'Domain', + name: 'domain' + }, + { + label: 'Anyone', + name: 'anyone' + } + ], + show: { + shareActions: ['shareFile'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Email Address', + name: 'emailAddress', + type: 'string', + description: 'Email address for user/group sharing', + show: { + shareActions: ['shareFile'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Domain Name', + name: 'domainName', + type: 'string', + description: 'Domain name for domain sharing', + show: { + shareActions: ['shareFile'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Send Notification Email', + name: 'sendNotificationEmail', + type: 'boolean', + description: 'Whether to send notification emails when sharing', + default: true, + show: { + shareActions: ['shareFile'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Email Message', + name: 'emailMessage', + type: 'string', + description: 'Custom message to include in notification email', + show: { + shareActions: ['shareFile'] + }, + additionalParams: true, + optional: 
true + }, + // Advanced Parameters for File Actions + { + label: 'Include Items From All Drives', + name: 'includeItemsFromAllDrives', + type: 'boolean', + description: 'Include items from all drives (shared drives)', + show: { + fileActions: ['listFiles'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Include Items From All Drives', + name: 'includeItemsFromAllDrives', + type: 'boolean', + description: 'Include items from all drives (shared drives)', + show: { + searchActions: ['searchFiles'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Supports All Drives', + name: 'supportsAllDrives', + type: 'boolean', + description: 'Whether the application supports both My Drives and shared drives', + show: { + fileActions: ['listFiles', 'getFile', 'createFile', 'updateFile', 'deleteFile', 'copyFile', 'downloadFile'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Supports All Drives', + name: 'supportsAllDrives', + type: 'boolean', + description: 'Whether the application supports both My Drives and shared drives', + show: { + folderActions: ['createFolder', 'listFolderContents', 'deleteFolder'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Supports All Drives', + name: 'supportsAllDrives', + type: 'boolean', + description: 'Whether the application supports both My Drives and shared drives', + show: { + searchActions: ['searchFiles'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Supports All Drives', + name: 'supportsAllDrives', + type: 'boolean', + description: 'Whether the application supports both My Drives and shared drives', + show: { + shareActions: ['shareFile', 'getPermissions', 'removePermission'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Fields', + name: 'fields', + type: 'string', + description: 'Specific fields to include in response (e.g., "files(id,name,mimeType)")', + show: { + fileActions: ['listFiles', 'getFile'] + }, + 
additionalParams: true, + optional: true + }, + { + label: 'Acknowledge Abuse', + name: 'acknowledgeAbuse', + type: 'boolean', + description: 'Acknowledge the risk of downloading known malware or abusive files', + show: { + fileActions: ['getFile', 'downloadFile'] + }, + additionalParams: true, + optional: true + } + ] + } + + async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { + let credentialData = await getCredentialData(nodeData.credential ?? '', options) + credentialData = await refreshOAuth2Token(nodeData.credential ?? '', credentialData, options) + const accessToken = getCredentialParam('access_token', credentialData, nodeData) + + if (!accessToken) { + throw new Error('No access token found in credential') + } + + const driveType = nodeData.inputs?.driveType as string + const fileActions = convertMultiOptionsToStringArray(nodeData.inputs?.fileActions) + const folderActions = convertMultiOptionsToStringArray(nodeData.inputs?.folderActions) + const searchActions = convertMultiOptionsToStringArray(nodeData.inputs?.searchActions) + const shareActions = convertMultiOptionsToStringArray(nodeData.inputs?.shareActions) + + // Combine all actions based on type + let actions: string[] = [] + if (driveType === 'file') { + actions = fileActions + } else if (driveType === 'folder') { + actions = folderActions + } else if (driveType === 'search') { + actions = searchActions + } else if (driveType === 'share') { + actions = shareActions + } + + const defaultParams = this.transformNodeInputsToToolArgs(nodeData) + + const tools = createGoogleDriveTools({ + accessToken, + actions, + defaultParams + }) + + return tools + } + + transformNodeInputsToToolArgs(nodeData: INodeData): Record { + // Collect default parameters from inputs + const defaultParams: Record = {} + + // Add parameters based on the inputs provided + if (nodeData.inputs?.fileId) defaultParams.fileId = nodeData.inputs.fileId + if (nodeData.inputs?.folderId) defaultParams.folderId = 
nodeData.inputs.folderId + if (nodeData.inputs?.permissionId) defaultParams.permissionId = nodeData.inputs.permissionId + if (nodeData.inputs?.fileName) defaultParams.name = nodeData.inputs.fileName + if (nodeData.inputs?.fileContent) defaultParams.content = nodeData.inputs.fileContent + if (nodeData.inputs?.mimeType) defaultParams.mimeType = nodeData.inputs.mimeType + if (nodeData.inputs?.parentFolderId) defaultParams.parents = nodeData.inputs.parentFolderId + if (nodeData.inputs?.description) defaultParams.description = nodeData.inputs.description + if (nodeData.inputs?.searchQuery) defaultParams.query = nodeData.inputs.searchQuery + if (nodeData.inputs?.maxResults) defaultParams.pageSize = nodeData.inputs.maxResults + if (nodeData.inputs?.orderBy) defaultParams.orderBy = nodeData.inputs.orderBy + if (nodeData.inputs?.shareRole) defaultParams.role = nodeData.inputs.shareRole + if (nodeData.inputs?.shareType) defaultParams.type = nodeData.inputs.shareType + if (nodeData.inputs?.emailAddress) defaultParams.emailAddress = nodeData.inputs.emailAddress + if (nodeData.inputs?.domainName) defaultParams.domain = nodeData.inputs.domainName + if (nodeData.inputs?.sendNotificationEmail !== undefined) + defaultParams.sendNotificationEmail = nodeData.inputs.sendNotificationEmail + if (nodeData.inputs?.emailMessage) defaultParams.emailMessage = nodeData.inputs.emailMessage + if (nodeData.inputs?.includeItemsFromAllDrives !== undefined) + defaultParams.includeItemsFromAllDrives = nodeData.inputs.includeItemsFromAllDrives + if (nodeData.inputs?.supportsAllDrives !== undefined) defaultParams.supportsAllDrives = nodeData.inputs.supportsAllDrives + if (nodeData.inputs?.fields) defaultParams.fields = nodeData.inputs.fields + if (nodeData.inputs?.acknowledgeAbuse !== undefined) defaultParams.acknowledgeAbuse = nodeData.inputs.acknowledgeAbuse + + return defaultParams + } +} + +module.exports = { nodeClass: GoogleDrive_Tools } diff --git 
a/packages/components/nodes/tools/GoogleDrive/core.ts b/packages/components/nodes/tools/GoogleDrive/core.ts new file mode 100644 index 000000000..e67cc6ae7 --- /dev/null +++ b/packages/components/nodes/tools/GoogleDrive/core.ts @@ -0,0 +1,982 @@ +import { z } from 'zod' +import fetch from 'node-fetch' +import { DynamicStructuredTool } from '../OpenAPIToolkit/core' +import { TOOL_ARGS_PREFIX, formatToolError } from '../../../src/agents' + +export const desc = `Use this when you want to access Google Drive API for managing files and folders` + +export interface Headers { + [key: string]: string +} + +export interface Body { + [key: string]: any +} + +export interface RequestParameters { + headers?: Headers + body?: Body + url?: string + description?: string + name?: string + actions?: string[] + accessToken?: string + defaultParams?: any +} + +// Define schemas for different Google Drive operations + +// File Schemas +const ListFilesSchema = z.object({ + pageSize: z.number().optional().default(10).describe('Maximum number of files to return (1-1000)'), + pageToken: z.string().optional().describe('Token for next page of results'), + orderBy: z.string().optional().describe('Sort order (name, folder, createdTime, modifiedTime, etc.)'), + query: z.string().optional().describe('Search query (e.g., "name contains \'hello\'")'), + spaces: z.string().optional().default('drive').describe('Spaces to search (drive, appDataFolder, photos)'), + fields: z.string().optional().describe('Fields to include in response'), + includeItemsFromAllDrives: z.boolean().optional().describe('Include items from all drives'), + supportsAllDrives: z.boolean().optional().describe('Whether the requesting application supports both My Drives and shared drives') +}) + +const GetFileSchema = z.object({ + fileId: z.string().describe('File ID'), + fields: z.string().optional().describe('Fields to include in response'), + supportsAllDrives: z.boolean().optional().describe('Whether the requesting 
application supports both My Drives and shared drives'), + acknowledgeAbuse: z + .boolean() + .optional() + .describe('Whether the user is acknowledging the risk of downloading known malware or other abusive files') +}) + +const CreateFileSchema = z.object({ + name: z.string().describe('File name'), + parents: z.string().optional().describe('Comma-separated list of parent folder IDs'), + mimeType: z.string().optional().describe('MIME type of the file'), + description: z.string().optional().describe('File description'), + content: z.string().optional().describe('File content (for text files)'), + supportsAllDrives: z.boolean().optional().describe('Whether the requesting application supports both My Drives and shared drives') +}) + +const UpdateFileSchema = z.object({ + fileId: z.string().describe('File ID to update'), + name: z.string().optional().describe('New file name'), + description: z.string().optional().describe('New file description'), + starred: z.boolean().optional().describe('Whether the file is starred'), + trashed: z.boolean().optional().describe('Whether the file is trashed'), + parents: z.string().optional().describe('Comma-separated list of new parent folder IDs'), + supportsAllDrives: z.boolean().optional().describe('Whether the requesting application supports both My Drives and shared drives') +}) + +const DeleteFileSchema = z.object({ + fileId: z.string().describe('File ID to delete'), + supportsAllDrives: z.boolean().optional().describe('Whether the requesting application supports both My Drives and shared drives') +}) + +const CopyFileSchema = z.object({ + fileId: z.string().describe('File ID to copy'), + name: z.string().describe('Name for the copied file'), + parents: z.string().optional().describe('Comma-separated list of parent folder IDs for the copy'), + supportsAllDrives: z.boolean().optional().describe('Whether the requesting application supports both My Drives and shared drives') +}) + +const DownloadFileSchema = z.object({ + fileId: 
z.string().describe('File ID to download'), + acknowledgeAbuse: z + .boolean() + .optional() + .describe('Whether the user is acknowledging the risk of downloading known malware or other abusive files'), + supportsAllDrives: z.boolean().optional().describe('Whether the requesting application supports both My Drives and shared drives') +}) + +const CreateFolderSchema = z.object({ + name: z.string().describe('Folder name'), + parents: z.string().optional().describe('Comma-separated list of parent folder IDs'), + description: z.string().optional().describe('Folder description'), + supportsAllDrives: z.boolean().optional().describe('Whether the requesting application supports both My Drives and shared drives') +}) + +const SearchFilesSchema = z.object({ + query: z.string().describe('Search query using Google Drive search syntax'), + pageSize: z.number().optional().default(10).describe('Maximum number of files to return'), + orderBy: z.string().optional().describe('Sort order'), + includeItemsFromAllDrives: z.boolean().optional().describe('Include items from all drives'), + supportsAllDrives: z.boolean().optional().describe('Whether the requesting application supports both My Drives and shared drives') +}) + +const ShareFileSchema = z.object({ + fileId: z.string().describe('File ID to share'), + role: z.enum(['reader', 'writer', 'commenter', 'owner']).describe('Permission role'), + type: z.enum(['user', 'group', 'domain', 'anyone']).describe('Permission type'), + emailAddress: z.string().optional().describe('Email address (required for user/group types)'), + domain: z.string().optional().describe('Domain name (required for domain type)'), + allowFileDiscovery: z.boolean().optional().describe('Whether the file can be discovered by search'), + sendNotificationEmail: z.boolean().optional().default(true).describe('Whether to send notification emails'), + emailMessage: z.string().optional().describe('Custom message to include in notification email'), + supportsAllDrives: 
z.boolean().optional().describe('Whether the requesting application supports both My Drives and shared drives') +}) + +class BaseGoogleDriveTool extends DynamicStructuredTool { + protected accessToken: string = '' + + constructor(args: any) { + super(args) + this.accessToken = args.accessToken ?? '' + } + + async makeGoogleDriveRequest({ + endpoint, + method = 'GET', + body, + params + }: { + endpoint: string + method?: string + body?: any + params?: any + }): Promise { + const baseUrl = 'https://www.googleapis.com/drive/v3' + const url = `${baseUrl}/${endpoint}` + + const headers: { [key: string]: string } = { + Authorization: `Bearer ${this.accessToken}`, + Accept: 'application/json', + ...this.headers + } + + if (method !== 'GET' && body) { + headers['Content-Type'] = 'application/json' + } + + const response = await fetch(url, { + method, + headers, + body: body ? (typeof body === 'string' ? body : JSON.stringify(body)) : undefined + }) + + if (!response.ok) { + const errorText = await response.text() + throw new Error(`Google Drive API Error ${response.status}: ${response.statusText} - ${errorText}`) + } + + const data = await response.text() + return data + TOOL_ARGS_PREFIX + JSON.stringify(params) + } +} + +// File Tools +class ListFilesTool extends BaseGoogleDriveTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'list_files', + description: 'List files and folders from Google Drive', + schema: ListFilesSchema, + baseUrl: '', + method: 'GET', + headers: {} + } + super({ + ...toolInput, + accessToken: args.accessToken + }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const queryParams = new URLSearchParams() + + if (params.pageSize) queryParams.append('pageSize', params.pageSize.toString()) + if (params.pageToken) queryParams.append('pageToken', params.pageToken) + if (params.orderBy) queryParams.append('orderBy', 
params.orderBy) + if (params.query) queryParams.append('q', params.query) + if (params.spaces) queryParams.append('spaces', params.spaces) + if (params.fields) queryParams.append('fields', params.fields) + if (params.includeItemsFromAllDrives) queryParams.append('includeItemsFromAllDrives', params.includeItemsFromAllDrives.toString()) + if (params.supportsAllDrives) queryParams.append('supportsAllDrives', params.supportsAllDrives.toString()) + + const endpoint = `files?${queryParams.toString()}` + + try { + const response = await this.makeGoogleDriveRequest({ endpoint, params }) + return response + } catch (error) { + return formatToolError(`Error listing files: ${error}`, params) + } + } +} + +class GetFileTool extends BaseGoogleDriveTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'get_file', + description: 'Get file metadata from Google Drive', + schema: GetFileSchema, + baseUrl: '', + method: 'GET', + headers: {} + } + super({ + ...toolInput, + accessToken: args.accessToken + }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const queryParams = new URLSearchParams() + + if (params.fields) queryParams.append('fields', params.fields) + if (params.supportsAllDrives) queryParams.append('supportsAllDrives', params.supportsAllDrives.toString()) + if (params.acknowledgeAbuse) queryParams.append('acknowledgeAbuse', params.acknowledgeAbuse.toString()) + + const endpoint = `files/${encodeURIComponent(params.fileId)}?${queryParams.toString()}` + + try { + const response = await this.makeGoogleDriveRequest({ endpoint, params }) + return response + } catch (error) { + return formatToolError(`Error getting file: ${error}`, params) + } + } +} + +class CreateFileTool extends BaseGoogleDriveTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'create_file', + description: 'Create a new file in Google Drive', + 
schema: CreateFileSchema, + baseUrl: '', + method: 'POST', + headers: {} + } + super({ + ...toolInput, + accessToken: args.accessToken + }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + + try { + // Validate required parameters + if (!params.name) { + throw new Error('File name is required') + } + + const queryParams = new URLSearchParams() + if (params.supportsAllDrives) queryParams.append('supportsAllDrives', params.supportsAllDrives.toString()) + + // Prepare metadata + const fileMetadata: any = { + name: params.name + } + + if (params.parents) { + // Validate parent folder IDs format + const parentIds = params.parents + .split(',') + .map((p: string) => p.trim()) + .filter((p: string) => p.length > 0) + if (parentIds.length > 0) { + fileMetadata.parents = parentIds + } + } + if (params.mimeType) fileMetadata.mimeType = params.mimeType + if (params.description) fileMetadata.description = params.description + + // Determine upload type based on content and metadata + if (!params.content) { + // Metadata-only upload (no file content) - standard endpoint + const endpoint = `files?${queryParams.toString()}` + const response = await this.makeGoogleDriveRequest({ + endpoint, + method: 'POST', + body: fileMetadata, + params + }) + return response + } else { + // Validate content + if (typeof params.content !== 'string') { + throw new Error('File content must be a string') + } + + // Check if we have metadata beyond just the name + const hasAdditionalMetadata = params.parents || params.description || params.mimeType + + if (!hasAdditionalMetadata) { + // Simple upload (uploadType=media) - only file content, basic metadata + return await this.performSimpleUpload(params, queryParams) + } else { + // Multipart upload (uploadType=multipart) - file content + metadata + return await this.performMultipartUpload(params, fileMetadata, queryParams) + } + } + } catch (error) { + return 
formatToolError(`Error creating file: ${error}`, params) + } + } + + private async performSimpleUpload(params: any, queryParams: URLSearchParams): Promise { + // Simple upload: POST https://www.googleapis.com/upload/drive/v3/files?uploadType=media + queryParams.append('uploadType', 'media') + const url = `https://www.googleapis.com/upload/drive/v3/files?${queryParams.toString()}` + + const headers: { [key: string]: string } = { + Authorization: `Bearer ${this.accessToken}`, + 'Content-Type': params.mimeType || 'application/octet-stream', + 'Content-Length': Buffer.byteLength(params.content, 'utf8').toString() + } + + const response = await fetch(url, { + method: 'POST', + headers, + body: params.content + }) + + if (!response.ok) { + const errorText = await response.text() + throw new Error(`Google Drive API Error ${response.status}: ${response.statusText} - ${errorText}`) + } + + const data = await response.text() + return data + TOOL_ARGS_PREFIX + JSON.stringify(params) + } + + private async performMultipartUpload(params: any, fileMetadata: any, queryParams: URLSearchParams): Promise { + // Multipart upload: POST https://www.googleapis.com/upload/drive/v3/files?uploadType=multipart + queryParams.append('uploadType', 'multipart') + const url = `https://www.googleapis.com/upload/drive/v3/files?${queryParams.toString()}` + + // Create multipart/related body according to RFC 2387 + const boundary = '-------314159265358979323846' + + // Build multipart body - RFC 2387 format + let body = `--${boundary}\r\n` + + // Part 1: Metadata (application/json; charset=UTF-8) + body += 'Content-Type: application/json; charset=UTF-8\r\n\r\n' + body += JSON.stringify(fileMetadata) + '\r\n' + + // Part 2: Media content (any MIME type) + body += `--${boundary}\r\n` + body += `Content-Type: ${params.mimeType || 'application/octet-stream'}\r\n\r\n` + body += params.content + '\r\n' + + // Close boundary + body += `--${boundary}--` + + const headers: { [key: string]: string } = { + 
Authorization: `Bearer ${this.accessToken}`, + 'Content-Type': `multipart/related; boundary="${boundary}"`, + 'Content-Length': Buffer.byteLength(body, 'utf8').toString() + } + + try { + const response = await fetch(url, { + method: 'POST', + headers, + body: body + }) + + if (!response.ok) { + const errorText = await response.text() + console.error('Multipart upload failed:', { + url, + headers: { ...headers, Authorization: '[REDACTED]' }, + metadata: fileMetadata, + contentLength: params.content?.length || 0, + error: errorText + }) + throw new Error(`Google Drive API Error ${response.status}: ${response.statusText} - ${errorText}`) + } + + const data = await response.text() + return data + TOOL_ARGS_PREFIX + JSON.stringify(params) + } catch (error) { + throw new Error(`Multipart upload failed: ${error}`) + } + } +} + +class UpdateFileTool extends BaseGoogleDriveTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'update_file', + description: 'Update file metadata in Google Drive', + schema: UpdateFileSchema, + baseUrl: '', + method: 'PATCH', + headers: {} + } + super({ + ...toolInput, + accessToken: args.accessToken + }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + + try { + const updateData: any = {} + + if (params.name) updateData.name = params.name + if (params.description) updateData.description = params.description + if (params.starred !== undefined) updateData.starred = params.starred + if (params.trashed !== undefined) updateData.trashed = params.trashed + + const queryParams = new URLSearchParams() + if (params.supportsAllDrives) queryParams.append('supportsAllDrives', params.supportsAllDrives.toString()) + + const endpoint = `files/${encodeURIComponent(params.fileId)}?${queryParams.toString()}` + + const response = await this.makeGoogleDriveRequest({ + endpoint, + method: 'PATCH', + body: updateData, + params + }) + return 
response + } catch (error) { + return formatToolError(`Error updating file: ${error}`, params) + } + } +} + +class DeleteFileTool extends BaseGoogleDriveTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'delete_file', + description: 'Delete a file from Google Drive', + schema: DeleteFileSchema, + baseUrl: '', + method: 'DELETE', + headers: {} + } + super({ + ...toolInput, + accessToken: args.accessToken + }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + + try { + const queryParams = new URLSearchParams() + if (params.supportsAllDrives) queryParams.append('supportsAllDrives', params.supportsAllDrives.toString()) + + const endpoint = `files/${encodeURIComponent(params.fileId)}?${queryParams.toString()}` + + await this.makeGoogleDriveRequest({ + endpoint, + method: 'DELETE', + params + }) + return `File deleted successfully` + } catch (error) { + return formatToolError(`Error deleting file: ${error}`, params) + } + } +} + +class CopyFileTool extends BaseGoogleDriveTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'copy_file', + description: 'Copy a file in Google Drive', + schema: CopyFileSchema, + baseUrl: '', + method: 'POST', + headers: {} + } + super({ + ...toolInput, + accessToken: args.accessToken + }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + + try { + const copyData: any = { + name: params.name + } + + if (params.parents) { + copyData.parents = params.parents.split(',').map((p: string) => p.trim()) + } + + const queryParams = new URLSearchParams() + if (params.supportsAllDrives) queryParams.append('supportsAllDrives', params.supportsAllDrives.toString()) + + const endpoint = `files/${encodeURIComponent(params.fileId)}/copy?${queryParams.toString()}` + + const response = await 
this.makeGoogleDriveRequest({ + endpoint, + method: 'POST', + body: copyData, + params + }) + return response + } catch (error) { + return formatToolError(`Error copying file: ${error}`, params) + } + } +} + +class DownloadFileTool extends BaseGoogleDriveTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'download_file', + description: 'Download a file from Google Drive', + schema: DownloadFileSchema, + baseUrl: '', + method: 'GET', + headers: {} + } + super({ + ...toolInput, + accessToken: args.accessToken + }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + + try { + const queryParams = new URLSearchParams() + queryParams.append('alt', 'media') + if (params.acknowledgeAbuse) queryParams.append('acknowledgeAbuse', params.acknowledgeAbuse.toString()) + if (params.supportsAllDrives) queryParams.append('supportsAllDrives', params.supportsAllDrives.toString()) + + const endpoint = `files/${encodeURIComponent(params.fileId)}?${queryParams.toString()}` + + const response = await this.makeGoogleDriveRequest({ endpoint, params }) + return response + } catch (error) { + return formatToolError(`Error downloading file: ${error}`, params) + } + } +} + +class CreateFolderTool extends BaseGoogleDriveTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'create_folder', + description: 'Create a new folder in Google Drive', + schema: CreateFolderSchema, + baseUrl: '', + method: 'POST', + headers: {} + } + super({ + ...toolInput, + accessToken: args.accessToken + }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + + try { + const folderData: any = { + name: params.name, + mimeType: 'application/vnd.google-apps.folder' + } + + if (params.parents) { + folderData.parents = params.parents.split(',').map((p: string) => p.trim()) + } + 
if (params.description) folderData.description = params.description + + const queryParams = new URLSearchParams() + if (params.supportsAllDrives) queryParams.append('supportsAllDrives', params.supportsAllDrives.toString()) + + const endpoint = `files?${queryParams.toString()}` + + const response = await this.makeGoogleDriveRequest({ + endpoint, + method: 'POST', + body: folderData, + params + }) + return response + } catch (error) { + return formatToolError(`Error creating folder: ${error}`, params) + } + } +} + +class SearchFilesTool extends BaseGoogleDriveTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'search_files', + description: 'Search files in Google Drive', + schema: SearchFilesSchema, + baseUrl: '', + method: 'GET', + headers: {} + } + super({ + ...toolInput, + accessToken: args.accessToken + }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + + try { + const queryParams = new URLSearchParams() + queryParams.append('q', params.query) + if (params.pageSize) queryParams.append('pageSize', params.pageSize.toString()) + if (params.orderBy) queryParams.append('orderBy', params.orderBy) + if (params.includeItemsFromAllDrives) + queryParams.append('includeItemsFromAllDrives', params.includeItemsFromAllDrives.toString()) + if (params.supportsAllDrives) queryParams.append('supportsAllDrives', params.supportsAllDrives.toString()) + + const endpoint = `files?${queryParams.toString()}` + + const response = await this.makeGoogleDriveRequest({ endpoint, params }) + return response + } catch (error) { + return formatToolError(`Error searching files: ${error}`, params) + } + } +} + +class ShareFileTool extends BaseGoogleDriveTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'share_file', + description: 'Share a file in Google Drive', + schema: ShareFileSchema, + baseUrl: '', + method: 'POST', + headers: 
{} + } + super({ + ...toolInput, + accessToken: args.accessToken + }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + + try { + const permissionData: any = { + role: params.role, + type: params.type + } + + if (params.emailAddress) permissionData.emailAddress = params.emailAddress + if (params.domain) permissionData.domain = params.domain + if (params.allowFileDiscovery !== undefined) permissionData.allowFileDiscovery = params.allowFileDiscovery + + const queryParams = new URLSearchParams() + if (params.sendNotificationEmail !== undefined) + queryParams.append('sendNotificationEmail', params.sendNotificationEmail.toString()) + if (params.emailMessage) queryParams.append('emailMessage', params.emailMessage) + if (params.supportsAllDrives) queryParams.append('supportsAllDrives', params.supportsAllDrives.toString()) + + const endpoint = `files/${encodeURIComponent(params.fileId)}/permissions?${queryParams.toString()}` + + const response = await this.makeGoogleDriveRequest({ + endpoint, + method: 'POST', + body: permissionData, + params + }) + return response + } catch (error) { + return formatToolError(`Error sharing file: ${error}`, params) + } + } +} + +class ListFolderContentsTool extends BaseGoogleDriveTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'list_folder_contents', + description: 'List contents of a specific folder in Google Drive', + schema: z.object({ + folderId: z.string().describe('Folder ID to list contents from'), + pageSize: z.number().optional().default(10).describe('Maximum number of files to return'), + orderBy: z.string().optional().describe('Sort order'), + includeItemsFromAllDrives: z.boolean().optional().describe('Include items from all drives'), + supportsAllDrives: z + .boolean() + .optional() + .describe('Whether the requesting application supports both My Drives and shared drives') + }), + baseUrl: '', + 
            method: 'GET',
            headers: {}
        }
        super({
            ...toolInput,
            accessToken: args.accessToken
        })
        this.defaultParams = args.defaultParams || {}
    }

    // Lists the children of a folder via Drive v3 `files` with a `q` filter of `'<folderId>' in parents`.
    async _call(arg: any): Promise<string> {
        // Node-configured defaults override LLM-supplied arguments.
        const params = { ...arg, ...this.defaultParams }

        try {
            const queryParams = new URLSearchParams()
            queryParams.append('q', `'${params.folderId}' in parents`)
            if (params.pageSize) queryParams.append('pageSize', params.pageSize.toString())
            if (params.orderBy) queryParams.append('orderBy', params.orderBy)
            if (params.includeItemsFromAllDrives)
                queryParams.append('includeItemsFromAllDrives', params.includeItemsFromAllDrives.toString())
            if (params.supportsAllDrives) queryParams.append('supportsAllDrives', params.supportsAllDrives.toString())

            const endpoint = `files?${queryParams.toString()}`

            const response = await this.makeGoogleDriveRequest({ endpoint, params })
            return response
        } catch (error) {
            return formatToolError(`Error listing folder contents: ${error}`, params)
        }
    }
}

// Deletes a folder (a Drive file) by ID via DELETE /files/{folderId}.
class DeleteFolderTool extends BaseGoogleDriveTool {
    defaultParams: any

    constructor(args: any) {
        const toolInput = {
            name: 'delete_folder',
            description: 'Delete a folder from Google Drive',
            schema: z.object({
                folderId: z.string().describe('Folder ID to delete'),
                supportsAllDrives: z
                    .boolean()
                    .optional()
                    .describe('Whether the requesting application supports both My Drives and shared drives')
            }),
            baseUrl: '',
            method: 'DELETE',
            headers: {}
        }
        super({
            ...toolInput,
            accessToken: args.accessToken
        })
        this.defaultParams = args.defaultParams || {}
    }

    async _call(arg: any): Promise<string> {
        const params = { ...arg, ...this.defaultParams }

        try {
            const queryParams = new URLSearchParams()
            if (params.supportsAllDrives) queryParams.append('supportsAllDrives', params.supportsAllDrives.toString())

            // IDs are model-supplied, so they are URI-encoded before being placed in the path.
            const endpoint = `files/${encodeURIComponent(params.folderId)}?${queryParams.toString()}`

            await this.makeGoogleDriveRequest({
                endpoint,
                method: 'DELETE',
                params
            })
            return `Folder deleted successfully`
        } catch (error) {
            return formatToolError(`Error deleting folder: ${error}`, params)
        }
    }
}

// Lists the permission entries attached to a file.
class GetPermissionsTool extends BaseGoogleDriveTool {
    defaultParams: any

    constructor(args: any) {
        const toolInput = {
            name: 'get_permissions',
            description: 'Get permissions for a file in Google Drive',
            schema: z.object({
                fileId: z.string().describe('File ID to get permissions for'),
                supportsAllDrives: z
                    .boolean()
                    .optional()
                    .describe('Whether the requesting application supports both My Drives and shared drives')
            }),
            baseUrl: '',
            method: 'GET',
            headers: {}
        }
        super({
            ...toolInput,
            accessToken: args.accessToken
        })
        this.defaultParams = args.defaultParams || {}
    }

    async _call(arg: any): Promise<string> {
        const params = { ...arg, ...this.defaultParams }

        try {
            const queryParams = new URLSearchParams()
            if (params.supportsAllDrives) queryParams.append('supportsAllDrives', params.supportsAllDrives.toString())

            const endpoint = `files/${encodeURIComponent(params.fileId)}/permissions?${queryParams.toString()}`

            const response = await this.makeGoogleDriveRequest({ endpoint, params })
            return response
        } catch (error) {
            return formatToolError(`Error getting permissions: ${error}`, params)
        }
    }
}

// Removes a single permission entry from a file.
class RemovePermissionTool extends BaseGoogleDriveTool {
    defaultParams: any

    constructor(args: any) {
        const toolInput = {
            name: 'remove_permission',
            description: 'Remove a permission from a file in Google Drive',
            schema: z.object({
                fileId: z.string().describe('File ID to remove permission from'),
                permissionId: z.string().describe('Permission ID to remove'),
                supportsAllDrives: z
                    .boolean()
                    .optional()
                    .describe('Whether the requesting application supports both My Drives and shared drives')
            }),
            baseUrl: '',
            method: 'DELETE',
            headers: {}
        }
        super({
            ...toolInput,
            accessToken: args.accessToken
        })
        this.defaultParams = args.defaultParams || {}
    }

    async _call(arg: any): Promise<string> {
        const params = { ...arg, ...this.defaultParams }

        try {
            const queryParams = new URLSearchParams()
            if (params.supportsAllDrives) queryParams.append('supportsAllDrives', params.supportsAllDrives.toString())

            const endpoint = `files/${encodeURIComponent(params.fileId)}/permissions/${encodeURIComponent(
                params.permissionId
            )}?${queryParams.toString()}`

            await this.makeGoogleDriveRequest({
                endpoint,
                method: 'DELETE',
                params
            })
            return `Permission removed successfully`
        } catch (error) {
            return formatToolError(`Error removing permission: ${error}`, params)
        }
    }
}

// Factory: instantiates one Drive tool per requested action name.
export const createGoogleDriveTools = (args?: RequestParameters): DynamicStructuredTool[] => {
    const tools: DynamicStructuredTool[] = []
    const actions = args?.actions || []
    const accessToken = args?.accessToken || ''
    const defaultParams = args?.defaultParams || {}

    if (actions.includes('listFiles')) {
        tools.push(new ListFilesTool({ accessToken, defaultParams }))
    }

    if (actions.includes('getFile')) {
        tools.push(new GetFileTool({ accessToken, defaultParams }))
    }

    if (actions.includes('createFile')) {
        tools.push(new CreateFileTool({ accessToken, defaultParams }))
    }

    if (actions.includes('updateFile')) {
        tools.push(new UpdateFileTool({ accessToken, defaultParams }))
    }

    if (actions.includes('deleteFile')) {
        tools.push(new DeleteFileTool({ accessToken, defaultParams }))
    }

    if (actions.includes('copyFile')) {
        tools.push(new CopyFileTool({ accessToken, defaultParams }))
    }

    if (actions.includes('downloadFile')) {
        tools.push(new DownloadFileTool({ accessToken, defaultParams }))
    }

    if (actions.includes('createFolder')) {
        tools.push(new CreateFolderTool({ accessToken, defaultParams }))
    }

    if (actions.includes('listFolderContents')) {
        tools.push(new ListFolderContentsTool({ accessToken, defaultParams }))
    }

    if
(actions.includes('deleteFolder')) { + tools.push(new DeleteFolderTool({ accessToken, defaultParams })) + } + + if (actions.includes('searchFiles')) { + tools.push(new SearchFilesTool({ accessToken, defaultParams })) + } + + if (actions.includes('shareFile')) { + tools.push(new ShareFileTool({ accessToken, defaultParams })) + } + + if (actions.includes('getPermissions')) { + tools.push(new GetPermissionsTool({ accessToken, defaultParams })) + } + + if (actions.includes('removePermission')) { + tools.push(new RemovePermissionTool({ accessToken, defaultParams })) + } + + return tools +} diff --git a/packages/components/nodes/tools/GoogleDrive/google-drive.svg b/packages/components/nodes/tools/GoogleDrive/google-drive.svg new file mode 100644 index 000000000..03b2f2129 --- /dev/null +++ b/packages/components/nodes/tools/GoogleDrive/google-drive.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/packages/components/nodes/tools/GoogleSheets/GoogleSheets.ts b/packages/components/nodes/tools/GoogleSheets/GoogleSheets.ts new file mode 100644 index 000000000..60f15903a --- /dev/null +++ b/packages/components/nodes/tools/GoogleSheets/GoogleSheets.ts @@ -0,0 +1,368 @@ +import { convertMultiOptionsToStringArray, getCredentialData, getCredentialParam, refreshOAuth2Token } from '../../../src/utils' +import { createGoogleSheetsTools } from './core' +import type { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface' + +class GoogleSheets_Tools implements INode { + label: string + name: string + version: number + type: string + icon: string + category: string + description: string + baseClasses: string[] + credential: INodeParams + inputs: INodeParams[] + + constructor() { + this.label = 'Google Sheets' + this.name = 'googleSheetsTool' + this.version = 1.0 + this.type = 'GoogleSheets' + this.icon = 'google-sheets.svg' + this.category = 'Tools' + this.description = 'Perform Google Sheets operations such as managing spreadsheets, reading and writing 
values'
        this.baseClasses = ['Tool']
        this.credential = {
            label: 'Connect Credential',
            name: 'credential',
            type: 'credential',
            credentialNames: ['googleSheetsOAuth2']
        }
        // UI input schema: `show` clauses make parameters conditional on the selected type/actions.
        this.inputs = [
            {
                label: 'Type',
                name: 'sheetsType',
                type: 'options',
                description: 'Type of Google Sheets operation',
                options: [
                    {
                        label: 'Spreadsheet',
                        name: 'spreadsheet'
                    },
                    {
                        label: 'Values',
                        name: 'values'
                    }
                ]
            },
            // Spreadsheet Actions
            {
                label: 'Spreadsheet Actions',
                name: 'spreadsheetActions',
                type: 'multiOptions',
                description: 'Actions to perform on spreadsheets',
                options: [
                    {
                        label: 'Create Spreadsheet',
                        name: 'createSpreadsheet'
                    },
                    {
                        label: 'Get Spreadsheet',
                        name: 'getSpreadsheet'
                    },
                    {
                        label: 'Update Spreadsheet',
                        name: 'updateSpreadsheet'
                    }
                ],
                show: {
                    sheetsType: ['spreadsheet']
                }
            },
            // Values Actions
            {
                label: 'Values Actions',
                name: 'valuesActions',
                type: 'multiOptions',
                description: 'Actions to perform on sheet values',
                options: [
                    {
                        label: 'Get Values',
                        name: 'getValues'
                    },
                    {
                        label: 'Update Values',
                        name: 'updateValues'
                    },
                    {
                        label: 'Append Values',
                        name: 'appendValues'
                    },
                    {
                        label: 'Clear Values',
                        name: 'clearValues'
                    },
                    {
                        label: 'Batch Get Values',
                        name: 'batchGetValues'
                    },
                    {
                        label: 'Batch Update Values',
                        name: 'batchUpdateValues'
                    },
                    {
                        label: 'Batch Clear Values',
                        name: 'batchClearValues'
                    }
                ],
                show: {
                    sheetsType: ['values']
                }
            },
            // Spreadsheet Parameters
            {
                label: 'Spreadsheet ID',
                name: 'spreadsheetId',
                type: 'string',
                description: 'The ID of the spreadsheet',
                show: {
                    sheetsType: ['spreadsheet', 'values']
                },
                additionalParams: true,
                optional: true
            },
            {
                label: 'Title',
                name: 'title',
                type: 'string',
                description: 'The title of the spreadsheet',
                show: {
                    spreadsheetActions: ['createSpreadsheet', 'updateSpreadsheet']
                },
                additionalParams: true,
                optional: true
            },
            {
                label: 'Sheet Count',
                name: 'sheetCount',
                type: 'number',
                description: 'Number of sheets to create',
                default: 1,
                show: {
                    spreadsheetActions: ['createSpreadsheet']
                },
                additionalParams: true,
                optional: true
            },
            // Values Parameters
            {
                label: 'Range',
                name: 'range',
                type: 'string',
                description: 'The range to read/write (e.g., A1:B2, Sheet1!A1:C10)',
                show: {
                    valuesActions: ['getValues', 'updateValues', 'clearValues']
                },
                additionalParams: true,
                optional: true
            },
            {
                label: 'Ranges',
                name: 'ranges',
                type: 'string',
                description: 'Comma-separated list of ranges for batch operations',
                show: {
                    valuesActions: ['batchGetValues', 'batchClearValues']
                },
                additionalParams: true,
                optional: true
            },
            {
                label: 'Values',
                name: 'values',
                type: 'string',
                description: 'JSON array of values to write (e.g., [["A1", "B1"], ["A2", "B2"]])',
                show: {
                    valuesActions: ['updateValues', 'appendValues', 'batchUpdateValues']
                },
                additionalParams: true,
                optional: true
            },
            {
                label: 'Value Input Option',
                name: 'valueInputOption',
                type: 'options',
                description: 'How input data should be interpreted',
                options: [
                    {
                        label: 'Raw',
                        name: 'RAW'
                    },
                    {
                        label: 'User Entered',
                        name: 'USER_ENTERED'
                    }
                ],
                default: 'USER_ENTERED',
                show: {
                    valuesActions: ['updateValues', 'appendValues', 'batchUpdateValues']
                },
                additionalParams: true,
                optional: true
            },
            {
                label: 'Value Render Option',
                name: 'valueRenderOption',
                type: 'options',
                description: 'How values should be represented in the output',
                options: [
                    {
                        label: 'Formatted Value',
                        name: 'FORMATTED_VALUE'
                    },
                    {
                        label: 'Unformatted Value',
                        name: 'UNFORMATTED_VALUE'
                    },
                    {
                        label: 'Formula',
                        name: 'FORMULA'
                    }
                ],
                default: 'FORMATTED_VALUE',
                show: {
                    valuesActions: ['getValues', 'batchGetValues']
                },
                additionalParams: true,
                optional: true
            },
            {
                label: 'Date Time Render Option',
                name: 'dateTimeRenderOption',
                type: 'options',
                description: 'How dates, times, and durations should be represented',
                options: [
                    {
                        label: 'Serial Number',
                        name: 'SERIAL_NUMBER'
                    },
                    {
                        label: 'Formatted String',
                        name: 'FORMATTED_STRING'
                    }
                ],
                default: 'FORMATTED_STRING',
                show: {
                    valuesActions: ['getValues', 'batchGetValues']
                },
                additionalParams: true,
                optional: true
            },
            {
                label: 'Insert Data Option',
                name: 'insertDataOption',
                type: 'options',
                description: 'How data should be inserted',
                options: [
                    {
                        label: 'Overwrite',
                        name: 'OVERWRITE'
                    },
                    {
                        label: 'Insert Rows',
                        name: 'INSERT_ROWS'
                    }
                ],
                default: 'OVERWRITE',
                show: {
                    valuesActions: ['appendValues']
                },
                additionalParams: true,
                optional: true
            },
            {
                label: 'Include Grid Data',
                name: 'includeGridData',
                type: 'boolean',
                description: 'True if grid data should be returned',
                default: false,
                show: {
                    spreadsheetActions: ['getSpreadsheet']
                },
                additionalParams: true,
                optional: true
            },
            {
                label: 'Major Dimension',
                name: 'majorDimension',
                type: 'options',
                description: 'The major dimension that results should use',
                options: [
                    {
                        label: 'Rows',
                        name: 'ROWS'
                    },
                    {
                        label: 'Columns',
                        name: 'COLUMNS'
                    }
                ],
                default: 'ROWS',
                show: {
                    valuesActions: ['getValues', 'updateValues', 'appendValues', 'batchGetValues', 'batchUpdateValues']
                },
                additionalParams: true,
                optional: true
            }
        ]
    }

    // Resolves the OAuth2 credential and builds the selected tool instances.
    async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
        const sheetsType = nodeData.inputs?.sheetsType as string

        let credentialData = await getCredentialData(nodeData.credential ?? '', options)
        // Refresh the token up front so every tool call carries a valid access token.
        credentialData = await refreshOAuth2Token(nodeData.credential ??
'', credentialData, options) + const accessToken = getCredentialParam('access_token', credentialData, nodeData) + + if (!accessToken) { + throw new Error('No access token found in credential') + } + + // Get all actions based on type + let actions: string[] = [] + + if (sheetsType === 'spreadsheet') { + actions = convertMultiOptionsToStringArray(nodeData.inputs?.spreadsheetActions) + } else if (sheetsType === 'values') { + actions = convertMultiOptionsToStringArray(nodeData.inputs?.valuesActions) + } + + const defaultParams = this.transformNodeInputsToToolArgs(nodeData) + + const tools = createGoogleSheetsTools({ + accessToken, + actions, + defaultParams + }) + + return tools + } + + transformNodeInputsToToolArgs(nodeData: INodeData): Record { + // Collect default parameters from inputs + const defaultParams: Record = {} + + // Common parameters + if (nodeData.inputs?.spreadsheetId) defaultParams.spreadsheetId = nodeData.inputs.spreadsheetId + + // Spreadsheet parameters + if (nodeData.inputs?.title) defaultParams.title = nodeData.inputs.title + if (nodeData.inputs?.sheetCount) defaultParams.sheetCount = nodeData.inputs.sheetCount + if (nodeData.inputs?.includeGridData !== undefined) defaultParams.includeGridData = nodeData.inputs.includeGridData + + // Values parameters + if (nodeData.inputs?.range) defaultParams.range = nodeData.inputs.range + if (nodeData.inputs?.ranges) defaultParams.ranges = nodeData.inputs.ranges + if (nodeData.inputs?.values) defaultParams.values = nodeData.inputs.values + if (nodeData.inputs?.valueInputOption) defaultParams.valueInputOption = nodeData.inputs.valueInputOption + if (nodeData.inputs?.valueRenderOption) defaultParams.valueRenderOption = nodeData.inputs.valueRenderOption + if (nodeData.inputs?.dateTimeRenderOption) defaultParams.dateTimeRenderOption = nodeData.inputs.dateTimeRenderOption + if (nodeData.inputs?.insertDataOption) defaultParams.insertDataOption = nodeData.inputs.insertDataOption + if 
(nodeData.inputs?.majorDimension) defaultParams.majorDimension = nodeData.inputs.majorDimension + + return defaultParams + } +} + +module.exports = { nodeClass: GoogleSheets_Tools } diff --git a/packages/components/nodes/tools/GoogleSheets/core.ts b/packages/components/nodes/tools/GoogleSheets/core.ts new file mode 100644 index 000000000..ad64ce498 --- /dev/null +++ b/packages/components/nodes/tools/GoogleSheets/core.ts @@ -0,0 +1,674 @@ +import { z } from 'zod' +import fetch from 'node-fetch' +import { DynamicStructuredTool } from '../OpenAPIToolkit/core' +import { TOOL_ARGS_PREFIX, formatToolError } from '../../../src/agents' + +export const desc = `Use this when you want to access Google Sheets API for managing spreadsheets and values` + +export interface Headers { + [key: string]: string +} + +export interface Body { + [key: string]: any +} + +export interface RequestParameters { + headers?: Headers + body?: Body + url?: string + description?: string + name?: string + actions?: string[] + accessToken?: string + defaultParams?: any +} + +// Define schemas for different Google Sheets operations + +// Spreadsheet Schemas +const CreateSpreadsheetSchema = z.object({ + title: z.string().describe('The title of the spreadsheet'), + sheetCount: z.number().optional().default(1).describe('Number of sheets to create'), + locale: z.string().optional().describe('The locale of the spreadsheet (e.g., en_US)'), + timeZone: z.string().optional().describe('The time zone of the spreadsheet (e.g., America/New_York)') +}) + +const GetSpreadsheetSchema = z.object({ + spreadsheetId: z.string().describe('The ID of the spreadsheet to retrieve'), + ranges: z.string().optional().describe('Comma-separated list of ranges to retrieve'), + includeGridData: z.boolean().optional().default(false).describe('True if grid data should be returned') +}) + +const UpdateSpreadsheetSchema = z.object({ + spreadsheetId: z.string().describe('The ID of the spreadsheet to update'), + title: 
z.string().optional().describe('New title for the spreadsheet'),
    locale: z.string().optional().describe('New locale for the spreadsheet'),
    timeZone: z.string().optional().describe('New time zone for the spreadsheet')
})

// Values Schemas
const GetValuesSchema = z.object({
    spreadsheetId: z.string().describe('The ID of the spreadsheet'),
    range: z.string().describe('The A1 notation of the range to retrieve values from'),
    valueRenderOption: z
        .enum(['FORMATTED_VALUE', 'UNFORMATTED_VALUE', 'FORMULA'])
        .optional()
        .default('FORMATTED_VALUE')
        .describe('How values should be represented'),
    dateTimeRenderOption: z
        .enum(['SERIAL_NUMBER', 'FORMATTED_STRING'])
        .optional()
        .default('FORMATTED_STRING')
        .describe('How dates should be represented'),
    majorDimension: z.enum(['ROWS', 'COLUMNS']).optional().default('ROWS').describe('The major dimension that results should use')
})

const UpdateValuesSchema = z.object({
    spreadsheetId: z.string().describe('The ID of the spreadsheet'),
    range: z.string().describe('The A1 notation of the range to update'),
    values: z.string().describe('JSON array of values to write (e.g., [["A1", "B1"], ["A2", "B2"]])'),
    valueInputOption: z.enum(['RAW', 'USER_ENTERED']).optional().default('USER_ENTERED').describe('How input data should be interpreted'),
    majorDimension: z.enum(['ROWS', 'COLUMNS']).optional().default('ROWS').describe('The major dimension of the values')
})

const AppendValuesSchema = z.object({
    spreadsheetId: z.string().describe('The ID of the spreadsheet'),
    range: z.string().describe('The A1 notation of the range to append to'),
    values: z.string().describe('JSON array of values to append'),
    valueInputOption: z.enum(['RAW', 'USER_ENTERED']).optional().default('USER_ENTERED').describe('How input data should be interpreted'),
    insertDataOption: z.enum(['OVERWRITE', 'INSERT_ROWS']).optional().default('OVERWRITE').describe('How data should be inserted'),
    majorDimension: z.enum(['ROWS', 'COLUMNS']).optional().default('ROWS').describe('The major dimension of the values')
})

const ClearValuesSchema = z.object({
    spreadsheetId: z.string().describe('The ID of the spreadsheet'),
    range: z.string().describe('The A1 notation of the range to clear')
})

const BatchGetValuesSchema = z.object({
    spreadsheetId: z.string().describe('The ID of the spreadsheet'),
    ranges: z.string().describe('Comma-separated list of ranges to retrieve'),
    valueRenderOption: z
        .enum(['FORMATTED_VALUE', 'UNFORMATTED_VALUE', 'FORMULA'])
        .optional()
        .default('FORMATTED_VALUE')
        .describe('How values should be represented'),
    dateTimeRenderOption: z
        .enum(['SERIAL_NUMBER', 'FORMATTED_STRING'])
        .optional()
        .default('FORMATTED_STRING')
        .describe('How dates should be represented'),
    majorDimension: z.enum(['ROWS', 'COLUMNS']).optional().default('ROWS').describe('The major dimension that results should use')
})

const BatchUpdateValuesSchema = z.object({
    spreadsheetId: z.string().describe('The ID of the spreadsheet'),
    valueInputOption: z.enum(['RAW', 'USER_ENTERED']).optional().default('USER_ENTERED').describe('How input data should be interpreted'),
    values: z
        .string()
        .describe('JSON array of value ranges to update (e.g., [{"range": "A1:B2", "values": [["A1", "B1"], ["A2", "B2"]]}])'),
    includeValuesInResponse: z.boolean().optional().default(false).describe('Whether to return the updated values in the response')
})

const BatchClearValuesSchema = z.object({
    spreadsheetId: z.string().describe('The ID of the spreadsheet'),
    ranges: z.string().describe('Comma-separated list of ranges to clear')
})

// Shared base: holds the OAuth access token and performs authenticated Sheets v4 requests.
class BaseGoogleSheetsTool extends DynamicStructuredTool {
    protected accessToken: string = ''

    constructor(args: any) {
        super(args)
        this.accessToken = args.accessToken ??
'' + } + + async makeGoogleSheetsRequest({ + endpoint, + method = 'GET', + body, + params + }: { + endpoint: string + method?: string + body?: any + params?: any + }): Promise { + const url = `https://sheets.googleapis.com/v4/${endpoint}` + + const headers = { + Authorization: `Bearer ${this.accessToken}`, + 'Content-Type': 'application/json', + Accept: 'application/json', + ...this.headers + } + + const response = await fetch(url, { + method, + headers, + body: body ? JSON.stringify(body) : undefined + }) + + if (!response.ok) { + const errorText = await response.text() + throw new Error(`Google Sheets API Error ${response.status}: ${response.statusText} - ${errorText}`) + } + + const data = await response.text() + return data + TOOL_ARGS_PREFIX + JSON.stringify(params) + } +} + +// Spreadsheet Tools +class CreateSpreadsheetTool extends BaseGoogleSheetsTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'create_spreadsheet', + description: 'Create a new Google Spreadsheet', + schema: CreateSpreadsheetSchema, + baseUrl: '', + method: 'POST', + headers: {} + } + super({ + ...toolInput, + accessToken: args.accessToken + }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + + try { + const body: any = { + properties: { + title: params.title + } + } + + if (params.locale) body.properties.locale = params.locale + if (params.timeZone) body.properties.timeZone = params.timeZone + + // Add sheets if specified + if (params.sheetCount && params.sheetCount > 1) { + body.sheets = [] + for (let i = 0; i < params.sheetCount; i++) { + body.sheets.push({ + properties: { + title: i === 0 ? 
'Sheet1' : `Sheet${i + 1}` + } + }) + } + } + + return await this.makeGoogleSheetsRequest({ + endpoint: 'spreadsheets', + method: 'POST', + body, + params + }) + } catch (error) { + return formatToolError(`Error creating spreadsheet: ${error}`, params) + } + } +} + +class GetSpreadsheetTool extends BaseGoogleSheetsTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'get_spreadsheet', + description: 'Get a Google Spreadsheet by ID', + schema: GetSpreadsheetSchema, + baseUrl: '', + method: 'GET', + headers: {} + } + super({ + ...toolInput, + accessToken: args.accessToken + }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + + try { + const queryParams = new URLSearchParams() + + if (params.ranges) { + params.ranges.split(',').forEach((range: string) => { + queryParams.append('ranges', range.trim()) + }) + } + if (params.includeGridData) queryParams.append('includeGridData', 'true') + + const queryString = queryParams.toString() + const endpoint = `spreadsheets/${params.spreadsheetId}${queryString ? 
`?${queryString}` : ''}` + + return await this.makeGoogleSheetsRequest({ + endpoint, + method: 'GET', + params + }) + } catch (error) { + return formatToolError(`Error getting spreadsheet: ${error}`, params) + } + } +} + +class UpdateSpreadsheetTool extends BaseGoogleSheetsTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'update_spreadsheet', + description: 'Update a Google Spreadsheet properties', + schema: UpdateSpreadsheetSchema, + baseUrl: '', + method: 'POST', + headers: {} + } + super({ + ...toolInput, + accessToken: args.accessToken + }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + + try { + const requests = [] + if (params.title || params.locale || params.timeZone) { + const updateProperties: any = {} + if (params.title) updateProperties.title = params.title + if (params.locale) updateProperties.locale = params.locale + if (params.timeZone) updateProperties.timeZone = params.timeZone + + requests.push({ + updateSpreadsheetProperties: { + properties: updateProperties, + fields: Object.keys(updateProperties).join(',') + } + }) + } + + const body = { requests } + + return await this.makeGoogleSheetsRequest({ + endpoint: `spreadsheets/${params.spreadsheetId}:batchUpdate`, + method: 'POST', + body, + params + }) + } catch (error) { + return formatToolError(`Error updating spreadsheet: ${error}`, params) + } + } +} + +// Values Tools +class GetValuesTool extends BaseGoogleSheetsTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'get_values', + description: 'Get values from a Google Spreadsheet range', + schema: GetValuesSchema, + baseUrl: '', + method: 'GET', + headers: {} + } + super({ + ...toolInput, + accessToken: args.accessToken + }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + + try { + const 
queryParams = new URLSearchParams() + + if (params.valueRenderOption) queryParams.append('valueRenderOption', params.valueRenderOption) + if (params.dateTimeRenderOption) queryParams.append('dateTimeRenderOption', params.dateTimeRenderOption) + if (params.majorDimension) queryParams.append('majorDimension', params.majorDimension) + + const queryString = queryParams.toString() + const encodedRange = encodeURIComponent(params.range) + const endpoint = `spreadsheets/${params.spreadsheetId}/values/${encodedRange}${queryString ? `?${queryString}` : ''}` + + return await this.makeGoogleSheetsRequest({ + endpoint, + method: 'GET', + params + }) + } catch (error) { + return formatToolError(`Error getting values: ${error}`, params) + } + } +} + +class UpdateValuesTool extends BaseGoogleSheetsTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'update_values', + description: 'Update values in a Google Spreadsheet range', + schema: UpdateValuesSchema, + baseUrl: '', + method: 'PUT', + headers: {} + } + super({ + ...toolInput, + accessToken: args.accessToken + }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + + try { + let values + try { + values = JSON.parse(params.values) + } catch (error) { + throw new Error('Values must be a valid JSON array') + } + + const body = { + values, + majorDimension: params.majorDimension || 'ROWS' + } + + const queryParams = new URLSearchParams() + queryParams.append('valueInputOption', params.valueInputOption || 'USER_ENTERED') + + const encodedRange = encodeURIComponent(params.range) + const endpoint = `spreadsheets/${params.spreadsheetId}/values/${encodedRange}?${queryParams.toString()}` + + return await this.makeGoogleSheetsRequest({ + endpoint, + method: 'PUT', + body, + params + }) + } catch (error) { + return formatToolError(`Error updating values: ${error}`, params) + } + } +} + +class AppendValuesTool extends 
BaseGoogleSheetsTool {
    defaultParams: any

    constructor(args: any) {
        const toolInput = {
            name: 'append_values',
            description: 'Append values to a Google Spreadsheet range',
            schema: AppendValuesSchema,
            baseUrl: '',
            method: 'POST',
            headers: {}
        }
        super({
            ...toolInput,
            accessToken: args.accessToken
        })
        this.defaultParams = args.defaultParams || {}
    }

    // Appends rows after the detected table in `range` via values/{range}:append.
    async _call(arg: any): Promise<string> {
        const params = { ...arg, ...this.defaultParams }

        try {
            let values
            try {
                values = JSON.parse(params.values)
            } catch (error) {
                throw new Error('Values must be a valid JSON array')
            }

            const body = {
                values,
                majorDimension: params.majorDimension || 'ROWS'
            }

            const queryParams = new URLSearchParams()
            queryParams.append('valueInputOption', params.valueInputOption || 'USER_ENTERED')
            queryParams.append('insertDataOption', params.insertDataOption || 'OVERWRITE')

            const encodedRange = encodeURIComponent(params.range)
            const endpoint = `spreadsheets/${params.spreadsheetId}/values/${encodedRange}:append?${queryParams.toString()}`

            return await this.makeGoogleSheetsRequest({
                endpoint,
                method: 'POST',
                body,
                params
            })
        } catch (error) {
            return formatToolError(`Error appending values: ${error}`, params)
        }
    }
}

// Clears cell contents (keeps formatting) via values/{range}:clear.
class ClearValuesTool extends BaseGoogleSheetsTool {
    defaultParams: any

    constructor(args: any) {
        const toolInput = {
            name: 'clear_values',
            description: 'Clear values from a Google Spreadsheet range',
            schema: ClearValuesSchema,
            baseUrl: '',
            method: 'POST',
            headers: {}
        }
        super({
            ...toolInput,
            accessToken: args.accessToken
        })
        this.defaultParams = args.defaultParams || {}
    }

    async _call(arg: any): Promise<string> {
        const params = { ...arg, ...this.defaultParams }

        try {
            const encodedRange = encodeURIComponent(params.range)
            const endpoint = `spreadsheets/${params.spreadsheetId}/values/${encodedRange}:clear`

            return await this.makeGoogleSheetsRequest({
                endpoint,
                method: 'POST',
                body: {},
                params
            })
        } catch (error) {
            return formatToolError(`Error clearing values: ${error}`, params)
        }
    }
}

// Reads several ranges in one call via values:batchGet.
class BatchGetValuesTool extends BaseGoogleSheetsTool {
    defaultParams: any

    constructor(args: any) {
        const toolInput = {
            name: 'batch_get_values',
            description: 'Get values from multiple Google Spreadsheet ranges',
            schema: BatchGetValuesSchema,
            baseUrl: '',
            method: 'GET',
            headers: {}
        }
        super({
            ...toolInput,
            accessToken: args.accessToken
        })
        this.defaultParams = args.defaultParams || {}
    }

    async _call(arg: any): Promise<string> {
        const params = { ...arg, ...this.defaultParams }

        try {
            const queryParams = new URLSearchParams()

            // Add ranges
            params.ranges.split(',').forEach((range: string) => {
                queryParams.append('ranges', range.trim())
            })

            if (params.valueRenderOption) queryParams.append('valueRenderOption', params.valueRenderOption)
            if (params.dateTimeRenderOption) queryParams.append('dateTimeRenderOption', params.dateTimeRenderOption)
            if (params.majorDimension) queryParams.append('majorDimension', params.majorDimension)

            const endpoint = `spreadsheets/${params.spreadsheetId}/values:batchGet?${queryParams.toString()}`

            return await this.makeGoogleSheetsRequest({
                endpoint,
                method: 'GET',
                params
            })
        } catch (error) {
            return formatToolError(`Error batch getting values: ${error}`, params)
        }
    }
}

// Writes several value ranges in one call via values:batchUpdate.
class BatchUpdateValuesTool extends BaseGoogleSheetsTool {
    defaultParams: any

    constructor(args: any) {
        const toolInput = {
            name: 'batch_update_values',
            description: 'Update values in multiple Google Spreadsheet ranges',
            schema: BatchUpdateValuesSchema,
            baseUrl: '',
            method: 'POST',
            headers: {}
        }
        super({
            ...toolInput,
            accessToken: args.accessToken
        })
        this.defaultParams = args.defaultParams || {}
    }

    async _call(arg: any): Promise<string> {
        const params = { ...arg, ...this.defaultParams }

        try {
            let valueRanges
            try {
                valueRanges =
JSON.parse(params.values)
            } catch (error) {
                throw new Error('Values must be a valid JSON array of value ranges')
            }

            const body = {
                valueInputOption: params.valueInputOption || 'USER_ENTERED',
                data: valueRanges,
                includeValuesInResponse: params.includeValuesInResponse || false
            }

            const endpoint = `spreadsheets/${params.spreadsheetId}/values:batchUpdate`

            return await this.makeGoogleSheetsRequest({
                endpoint,
                method: 'POST',
                body,
                params
            })
        } catch (error) {
            return formatToolError(`Error batch updating values: ${error}`, params)
        }
    }
}

// Clears several ranges in one call via values:batchClear.
class BatchClearValuesTool extends BaseGoogleSheetsTool {
    defaultParams: any

    constructor(args: any) {
        const toolInput = {
            name: 'batch_clear_values',
            description: 'Clear values from multiple Google Spreadsheet ranges',
            schema: BatchClearValuesSchema,
            baseUrl: '',
            method: 'POST',
            headers: {}
        }
        super({
            ...toolInput,
            accessToken: args.accessToken
        })
        this.defaultParams = args.defaultParams || {}
    }

    async _call(arg: any): Promise<string> {
        const params = { ...arg, ...this.defaultParams }

        try {
            const ranges = params.ranges.split(',').map((range: string) => range.trim())
            const body = { ranges }

            const endpoint = `spreadsheets/${params.spreadsheetId}/values:batchClear`

            return await this.makeGoogleSheetsRequest({
                endpoint,
                method: 'POST',
                body,
                params
            })
        } catch (error) {
            return formatToolError(`Error batch clearing values: ${error}`, params)
        }
    }
}

// Factory: maps requested action names to tool classes via a registry lookup.
export const createGoogleSheetsTools = (args?: RequestParameters): DynamicStructuredTool[] => {
    const { actions = [], accessToken, defaultParams } = args || {}
    const tools: DynamicStructuredTool[] = []

    // Define all available tools
    const toolClasses = {
        // Spreadsheet tools
        createSpreadsheet: CreateSpreadsheetTool,
        getSpreadsheet: GetSpreadsheetTool,
        updateSpreadsheet: UpdateSpreadsheetTool,
        // Values tools
        getValues: GetValuesTool,
        updateValues: UpdateValuesTool,
        appendValues: AppendValuesTool,
        clearValues: ClearValuesTool,
        batchGetValues: BatchGetValuesTool,
        batchUpdateValues: BatchUpdateValuesTool,
        batchClearValues: BatchClearValuesTool
    }

    // Create tools based on requested actions
    actions.forEach((action) => {
        const ToolClass = toolClasses[action as keyof typeof toolClasses]
        if (ToolClass) {
            tools.push(new ToolClass({ accessToken, defaultParams }))
        }
    })

    return tools
}
diff --git a/packages/components/nodes/tools/GoogleSheets/google-sheets.svg b/packages/components/nodes/tools/GoogleSheets/google-sheets.svg
new file mode 100644
index 000000000..43af0ccf1
--- /dev/null
+++ b/packages/components/nodes/tools/GoogleSheets/google-sheets.svg
@@ -0,0 +1 @@
+ \ No newline at end of file
diff --git a/packages/components/nodes/tools/JSONPathExtractor/JSONPathExtractor.test.ts b/packages/components/nodes/tools/JSONPathExtractor/JSONPathExtractor.test.ts
new file mode 100644
index 000000000..a1c6755f7
--- /dev/null
+++ b/packages/components/nodes/tools/JSONPathExtractor/JSONPathExtractor.test.ts
@@ -0,0 +1,261 @@
const { nodeClass: JSONPathExtractor_Tools } = require('./JSONPathExtractor')
import { INodeData } from '../../../src/Interface'

// Mock the getBaseClasses function
jest.mock('../../../src/utils', () => ({
    getBaseClasses: jest.fn(() => ['Tool', 'StructuredTool'])
}))

// Helper function to create a valid INodeData object
function createNodeData(id: string, inputs: any): INodeData {
    return {
        id: id,
        label: 'JSON Path Extractor',
        name: 'jsonPathExtractor',
        type: 'JSONPathExtractor',
        icon: 'jsonpathextractor.svg',
        version: 1.0,
        category: 'Tools',
        baseClasses: ['JSONPathExtractor', 'Tool'],
        inputs: inputs
    }
}

describe('JSONPathExtractor', () => {
    let nodeClass: any

    beforeEach(() => {
        nodeClass = new JSONPathExtractor_Tools()
    })

    describe('Tool Initialization', () => {
        it('should throw error when path is not provided', async () => {
            const
nodeData = createNodeData('test-node-1', { + path: '' + }) + + await expect(nodeClass.init(nodeData, '')).rejects.toThrow('JSON Path is required') + }) + + it('should initialize tool with path and default returnNullOnError', async () => { + const nodeData = createNodeData('test-node-2', { + path: 'data.value' + }) + + const tool = await nodeClass.init(nodeData, '') + expect(tool).toBeDefined() + expect(tool.name).toBe('json_path_extractor') + }) + + it('should initialize tool with custom returnNullOnError', async () => { + const nodeData = createNodeData('test-node-3', { + path: 'data.value', + returnNullOnError: true + }) + + const tool = await nodeClass.init(nodeData, '') + expect(tool).toBeDefined() + }) + }) + + describe('JSONPathExtractorTool Functionality', () => { + describe('Positive test cases - Path extraction', () => { + const successCases = [ + { + name: 'simple path from object', + path: 'data.value', + input: { data: { value: 'test' } }, + expected: 'test' + }, + { + name: 'nested path from object', + path: 'user.profile.name', + input: { user: { profile: { name: 'John' } } }, + expected: 'John' + }, + { + name: 'array index access', + path: 'items[0].name', + input: { items: [{ name: 'first' }, { name: 'second' }] }, + expected: 'first' + }, + { + name: 'multi-dimensional array', + path: 'matrix[0][1]', + input: { + matrix: [ + ['a', 'b'], + ['c', 'd'] + ] + }, + expected: 'b' + }, + { + name: 'object return (stringified)', + path: 'data', + input: { data: { nested: 'object' } }, + expected: '{"nested":"object"}' + }, + { + name: 'array return (stringified)', + path: 'tags', + input: { tags: ['a', 'b', 'c'] }, + expected: '["a","b","c"]' + }, + { + name: 'deep nesting', + path: 'a.b.c.d.e', + input: { a: { b: { c: { d: { e: 'deep' } } } } }, + expected: 'deep' + }, + { + name: 'array at root with index', + path: '[1]', + input: ['first', 'second', 'third'], + expected: 'second' + } + ] + + test.each(successCases)('should extract $name', async ({ 
path, input, expected }) => { + const nodeData = createNodeData(`test-node-${path}`, { + path: path, + returnNullOnError: false + }) + const tool = await nodeClass.init(nodeData, '') + const result = await tool._call({ json: input }) + expect(result).toBe(expected) + }) + }) + + describe('Primitive value handling', () => { + const primitiveTests = [ + { name: 'string', path: 'val', input: { val: 'text' }, expected: 'text' }, + { name: 'number', path: 'val', input: { val: 42 }, expected: '42' }, + { name: 'zero', path: 'val', input: { val: 0 }, expected: '0' }, + { name: 'boolean true', path: 'val', input: { val: true }, expected: 'true' }, + { name: 'boolean false', path: 'val', input: { val: false }, expected: 'false' }, + { name: 'null', path: 'val', input: { val: null }, expected: 'null' }, + { name: 'empty string', path: 'val', input: { val: '' }, expected: '' } + ] + + test.each(primitiveTests)('should handle $name value', async ({ path, input, expected }) => { + const nodeData = createNodeData(`test-primitive`, { + path: path, + returnNullOnError: false + }) + const tool = await nodeClass.init(nodeData, '') + const result = await tool._call({ json: input }) + expect(result).toBe(expected) + }) + }) + + describe('Special characters in keys', () => { + const specialCharTests = [ + { name: 'dashes', path: 'data.key-with-dash', input: { data: { 'key-with-dash': 'value' } } }, + { name: 'spaces', path: 'data.key with spaces', input: { data: { 'key with spaces': 'value' } } }, + { name: 'unicode', path: 'data.emoji๐Ÿ”‘', input: { data: { 'emoji๐Ÿ”‘': 'value' } } }, + { name: 'numeric strings', path: 'data.123', input: { data: { '123': 'value' } } } + ] + + test.each(specialCharTests)('should handle $name in keys', async ({ path, input }) => { + const nodeData = createNodeData(`test-special`, { + path: path, + returnNullOnError: false + }) + const tool = await nodeClass.init(nodeData, '') + const result = await tool._call({ json: input }) + 
expect(result).toBe('value') + }) + }) + + describe('Error handling - throw mode', () => { + const errorCases = [ + { + name: 'path not found', + path: 'data.value', + input: { data: { other: 'value' } }, + errorPattern: /Path "data.value" not found in JSON/ + }, + { + name: 'invalid JSON string', + path: 'data', + input: 'invalid json', + errorPattern: /Invalid JSON string/ + }, + { + name: 'array index on object', + path: 'data[0]', + input: { data: { key: 'value' } }, + errorPattern: /Path "data\[0\]" not found in JSON/ + }, + { + name: 'out of bounds array', + path: 'items[10]', + input: { items: ['a', 'b'] }, + errorPattern: /Path "items\[10\]" not found in JSON/ + } + ] + + test.each(errorCases)('should throw error for $name', async ({ path, input, errorPattern }) => { + const nodeData = createNodeData(`test-error`, { + path: path, + returnNullOnError: false + }) + const tool = await nodeClass.init(nodeData, '') + await expect(tool._call({ json: input })).rejects.toThrow(errorPattern) + }) + }) + + describe('Error handling - null mode', () => { + const nullCases = [ + { name: 'path not found', path: 'missing.path', input: { data: 'value' } }, + { name: 'invalid JSON string', path: 'data', input: 'invalid json' }, + { name: 'null in path', path: 'data.nested.value', input: { data: { nested: null } } }, + { name: 'empty array access', path: 'items[0]', input: { items: [] } }, + { name: 'property on primitive', path: 'value.nested', input: { value: 'string' } } + ] + + test.each(nullCases)('should return null for $name', async ({ path, input }) => { + const nodeData = createNodeData(`test-null`, { + path: path, + returnNullOnError: true + }) + const tool = await nodeClass.init(nodeData, '') + const result = await tool._call({ json: input }) + expect(result).toBe('null') + }) + + it('should still extract valid paths when returnNullOnError is true', async () => { + const nodeData = createNodeData('test-valid-null-mode', { + path: 'data.value', + returnNullOnError: 
true + }) + const tool = await nodeClass.init(nodeData, '') + const result = await tool._call({ + json: { data: { value: 'test' } } + }) + expect(result).toBe('test') + }) + }) + + describe('Complex structures', () => { + it('should handle deeply nested arrays and objects', async () => { + const nodeData = createNodeData('test-complex', { + path: 'users[0].addresses[1].city', + returnNullOnError: false + }) + const tool = await nodeClass.init(nodeData, '') + const result = await tool._call({ + json: { + users: [ + { + addresses: [{ city: 'New York' }, { city: 'Los Angeles' }] + } + ] + } + }) + expect(result).toBe('Los Angeles') + }) + }) + }) +}) diff --git a/packages/components/nodes/tools/JSONPathExtractor/JSONPathExtractor.ts b/packages/components/nodes/tools/JSONPathExtractor/JSONPathExtractor.ts new file mode 100644 index 000000000..4ab7adc1c --- /dev/null +++ b/packages/components/nodes/tools/JSONPathExtractor/JSONPathExtractor.ts @@ -0,0 +1,125 @@ +import { z } from 'zod' +import { StructuredTool } from '@langchain/core/tools' +import { INode, INodeData, INodeParams } from '../../../src/Interface' +import { getBaseClasses } from '../../../src/utils' +import { get } from 'lodash' + +/** + * Tool that extracts values from JSON using path + */ +class JSONPathExtractorTool extends StructuredTool { + name = 'json_path_extractor' + description = 'Extract value from JSON using configured path' + + schema = z.object({ + json: z + .union([z.string().describe('JSON string'), z.record(z.any()).describe('JSON object'), z.array(z.any()).describe('JSON array')]) + .describe('JSON data to extract value from') + }) + + private readonly path: string + private readonly returnNullOnError: boolean + + constructor(path: string, returnNullOnError: boolean = false) { + super() + this.path = path + this.returnNullOnError = returnNullOnError + } + + async _call({ json }: z.infer): Promise { + // Validate that path is configured + if (!this.path) { + if (this.returnNullOnError) { + 
return 'null' + } + throw new Error('No extraction path configured') + } + + let data: any + + // Parse JSON string if needed + if (typeof json === 'string') { + try { + data = JSON.parse(json) + } catch (error) { + if (this.returnNullOnError) { + return 'null' + } + throw new Error(`Invalid JSON string: ${error instanceof Error ? error.message : 'Parse error'}`) + } + } else { + data = json + } + + // Extract value using lodash get + const value = get(data, this.path) + + if (value === undefined) { + if (this.returnNullOnError) { + return 'null' + } + const jsonPreview = JSON.stringify(data, null, 2) + const preview = jsonPreview.length > 200 ? jsonPreview.substring(0, 200) + '...' : jsonPreview + throw new Error(`Path "${this.path}" not found in JSON. Received: ${preview}`) + } + + return typeof value === 'string' ? value : JSON.stringify(value) + } +} + +/** + * Node implementation for JSON Path Extractor tool + */ +class JSONPathExtractor_Tools implements INode { + label: string + name: string + version: number + type: string + icon: string + category: string + description: string + baseClasses: string[] + inputs: INodeParams[] + + constructor() { + this.label = 'JSON Path Extractor' + this.name = 'jsonPathExtractor' + this.version = 1.0 + this.type = 'JSONPathExtractor' + this.icon = 'jsonpathextractor.svg' + this.category = 'Tools' + this.description = 'Extract values from JSON using path expressions' + this.baseClasses = [this.type, ...getBaseClasses(JSONPathExtractorTool)] + this.inputs = [ + { + label: 'JSON Path', + name: 'path', + type: 'string', + description: 'Path to extract. 
Examples: data, user.name, items[0].id', + placeholder: 'data' + }, + { + label: 'Return Null on Error', + name: 'returnNullOnError', + type: 'boolean', + default: false, + description: 'Return null instead of throwing error when extraction fails', + optional: true, + additionalParams: true + } + ] + } + + async init(nodeData: INodeData, _: string): Promise { + const path = (nodeData.inputs?.path as string) || '' + const returnNullOnError = (nodeData.inputs?.returnNullOnError as boolean) || false + + if (!path) { + throw new Error('JSON Path is required') + } + + return new JSONPathExtractorTool(path, returnNullOnError) + } +} + +module.exports = { nodeClass: JSONPathExtractor_Tools } diff --git a/packages/components/nodes/tools/JSONPathExtractor/jsonpathextractor.svg b/packages/components/nodes/tools/JSONPathExtractor/jsonpathextractor.svg new file mode 100644 index 000000000..30b50a208 --- /dev/null +++ b/packages/components/nodes/tools/JSONPathExtractor/jsonpathextractor.svg @@ -0,0 +1,17 @@ + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/packages/components/nodes/tools/Jira/Jira.ts b/packages/components/nodes/tools/Jira/Jira.ts new file mode 100644 index 000000000..95c2b8c04 --- /dev/null +++ b/packages/components/nodes/tools/Jira/Jira.ts @@ -0,0 +1,449 @@ +import { convertMultiOptionsToStringArray, getCredentialData, getCredentialParam } from '../../../src/utils' +import { createJiraTools } from './core' +import type { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface' + +class Jira_Tools implements INode { + label: string + name: string + version: number + type: string + icon: string + category: string + description: string + baseClasses: string[] + credential: INodeParams + inputs: INodeParams[] + + constructor() { + this.label = 'Jira' + this.name = 'jiraTool' + this.version = 1.0 + this.type = 'Jira' + this.icon = 'jira.svg' + this.category = 'Tools' + this.description = 'Perform Jira operations for 
issues, comments, and users' + this.baseClasses = [this.type, 'Tool'] + this.credential = { + label: 'Connect Credential', + name: 'credential', + type: 'credential', + credentialNames: ['jiraApi'] + } + this.inputs = [ + { + label: 'Host', + name: 'jiraHost', + type: 'string', + placeholder: 'https://example.atlassian.net' + }, + { + label: 'Type', + name: 'jiraType', + type: 'options', + options: [ + { + label: 'Issues', + name: 'issues' + }, + { + label: 'Issue Comments', + name: 'comments' + }, + { + label: 'Users', + name: 'users' + } + ] + }, + // Issue Actions + { + label: 'Issue Actions', + name: 'issueActions', + type: 'multiOptions', + options: [ + { + label: 'List Issues', + name: 'listIssues' + }, + { + label: 'Create Issue', + name: 'createIssue' + }, + { + label: 'Get Issue', + name: 'getIssue' + }, + { + label: 'Update Issue', + name: 'updateIssue' + }, + { + label: 'Delete Issue', + name: 'deleteIssue' + }, + { + label: 'Assign Issue', + name: 'assignIssue' + }, + { + label: 'Transition Issue', + name: 'transitionIssue' + } + ], + show: { + jiraType: ['issues'] + } + }, + // Comment Actions + { + label: 'Comment Actions', + name: 'commentActions', + type: 'multiOptions', + options: [ + { + label: 'List Comments', + name: 'listComments' + }, + { + label: 'Create Comment', + name: 'createComment' + }, + { + label: 'Get Comment', + name: 'getComment' + }, + { + label: 'Update Comment', + name: 'updateComment' + }, + { + label: 'Delete Comment', + name: 'deleteComment' + } + ], + show: { + jiraType: ['comments'] + } + }, + // User Actions + { + label: 'User Actions', + name: 'userActions', + type: 'multiOptions', + options: [ + { + label: 'Search Users', + name: 'searchUsers' + }, + { + label: 'Get User', + name: 'getUser' + }, + { + label: 'Create User', + name: 'createUser' + }, + { + label: 'Update User', + name: 'updateUser' + }, + { + label: 'Delete User', + name: 'deleteUser' + } + ], + show: { + jiraType: ['users'] + } + }, + // ISSUE PARAMETERS 
+ { + label: 'Project Key', + name: 'projectKey', + type: 'string', + placeholder: 'PROJ', + description: 'Project key for the issue', + show: { + issueActions: ['listIssues', 'createIssue'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Issue Type', + name: 'issueType', + type: 'string', + placeholder: 'Bug, Task, Story', + description: 'Type of issue to create', + show: { + issueActions: ['createIssue'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Summary', + name: 'issueSummary', + type: 'string', + description: 'Issue summary/title', + show: { + issueActions: ['createIssue', 'updateIssue'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Description', + name: 'issueDescription', + type: 'string', + description: 'Issue description', + show: { + issueActions: ['createIssue', 'updateIssue'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Priority', + name: 'issuePriority', + type: 'string', + placeholder: 'Highest, High, Medium, Low, Lowest', + description: 'Issue priority', + show: { + issueActions: ['createIssue', 'updateIssue'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Issue Key', + name: 'issueKey', + type: 'string', + placeholder: 'PROJ-123', + description: 'Issue key (e.g., PROJ-123)', + show: { + issueActions: ['getIssue', 'updateIssue', 'deleteIssue', 'assignIssue', 'transitionIssue'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Assignee Account ID', + name: 'assigneeAccountId', + type: 'string', + description: 'Account ID of the user to assign', + show: { + issueActions: ['assignIssue', 'createIssue', 'updateIssue'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Transition ID', + name: 'transitionId', + type: 'string', + description: 'ID of the transition to execute', + show: { + issueActions: ['transitionIssue'] + }, + additionalParams: true, + optional: true + }, + { + label: 'JQL Query', + name: 'jqlQuery', + 
type: 'string', + placeholder: 'project = PROJ AND status = "To Do"', + description: 'JQL query for filtering issues', + show: { + issueActions: ['listIssues'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Max Results', + name: 'issueMaxResults', + type: 'number', + default: 50, + description: 'Maximum number of issues to return', + show: { + issueActions: ['listIssues'] + }, + additionalParams: true, + optional: true + }, + // COMMENT PARAMETERS + { + label: 'Issue Key (for Comments)', + name: 'commentIssueKey', + type: 'string', + placeholder: 'PROJ-123', + description: 'Issue key for comment operations', + show: { + commentActions: ['listComments', 'createComment'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Comment Text', + name: 'commentText', + type: 'string', + description: 'Comment content', + show: { + commentActions: ['createComment', 'updateComment'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Comment ID', + name: 'commentId', + type: 'string', + description: 'ID of the comment', + show: { + commentActions: ['getComment', 'updateComment', 'deleteComment'] + }, + additionalParams: true, + optional: true + }, + // USER PARAMETERS + { + label: 'Search Query', + name: 'userQuery', + type: 'string', + placeholder: 'john.doe', + description: 'Query string for user search', + show: { + userActions: ['searchUsers'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Account ID', + name: 'userAccountId', + type: 'string', + description: 'User account ID', + show: { + userActions: ['getUser', 'updateUser', 'deleteUser'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Email Address', + name: 'userEmail', + type: 'string', + placeholder: 'user@example.com', + description: 'User email address', + show: { + userActions: ['createUser', 'updateUser'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Display Name', + name: 'userDisplayName', + type: 
'string', + description: 'User display name', + show: { + userActions: ['createUser', 'updateUser'] + }, + additionalParams: true, + optional: true + }, + { + label: 'User Max Results', + name: 'userMaxResults', + type: 'number', + default: 50, + description: 'Maximum number of users to return', + show: { + userActions: ['searchUsers'] + }, + additionalParams: true, + optional: true + } + ] + } + + async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { + let credentialData = await getCredentialData(nodeData.credential ?? '', options) + const username = getCredentialParam('username', credentialData, nodeData) + const accessToken = getCredentialParam('accessToken', credentialData, nodeData) + const jiraHost = nodeData.inputs?.jiraHost as string + + if (!username) { + throw new Error('No username found in credential') + } + + if (!accessToken) { + throw new Error('No access token found in credential') + } + + if (!jiraHost) { + throw new Error('No Jira host provided') + } + + // Get all actions based on type + const jiraType = nodeData.inputs?.jiraType as string + let actions: string[] = [] + + if (jiraType === 'issues') { + actions = convertMultiOptionsToStringArray(nodeData.inputs?.issueActions) + } else if (jiraType === 'comments') { + actions = convertMultiOptionsToStringArray(nodeData.inputs?.commentActions) + } else if (jiraType === 'users') { + actions = convertMultiOptionsToStringArray(nodeData.inputs?.userActions) + } + + const defaultParams = this.transformNodeInputsToToolArgs(nodeData) + + // Create and return tools based on selected actions + const tools = createJiraTools({ + actions, + username, + accessToken, + jiraHost, + defaultParams + }) + + return tools + } + + transformNodeInputsToToolArgs(nodeData: INodeData): Record { + // Collect default parameters from inputs + const defaultParams: Record = {} + + // Issue parameters + if (nodeData.inputs?.projectKey) defaultParams.projectKey = nodeData.inputs.projectKey + if 
(nodeData.inputs?.issueType) defaultParams.issueType = nodeData.inputs.issueType + if (nodeData.inputs?.issueSummary) defaultParams.issueSummary = nodeData.inputs.issueSummary + if (nodeData.inputs?.issueDescription) defaultParams.issueDescription = nodeData.inputs.issueDescription + if (nodeData.inputs?.issuePriority) defaultParams.issuePriority = nodeData.inputs.issuePriority + if (nodeData.inputs?.issueKey) defaultParams.issueKey = nodeData.inputs.issueKey + if (nodeData.inputs?.assigneeAccountId) defaultParams.assigneeAccountId = nodeData.inputs.assigneeAccountId + if (nodeData.inputs?.transitionId) defaultParams.transitionId = nodeData.inputs.transitionId + if (nodeData.inputs?.jqlQuery) defaultParams.jqlQuery = nodeData.inputs.jqlQuery + if (nodeData.inputs?.issueMaxResults) defaultParams.issueMaxResults = nodeData.inputs.issueMaxResults + + // Comment parameters + if (nodeData.inputs?.commentIssueKey) defaultParams.commentIssueKey = nodeData.inputs.commentIssueKey + if (nodeData.inputs?.commentText) defaultParams.commentText = nodeData.inputs.commentText + if (nodeData.inputs?.commentId) defaultParams.commentId = nodeData.inputs.commentId + + // User parameters + if (nodeData.inputs?.userQuery) defaultParams.userQuery = nodeData.inputs.userQuery + if (nodeData.inputs?.userAccountId) defaultParams.userAccountId = nodeData.inputs.userAccountId + if (nodeData.inputs?.userEmail) defaultParams.userEmail = nodeData.inputs.userEmail + if (nodeData.inputs?.userDisplayName) defaultParams.userDisplayName = nodeData.inputs.userDisplayName + if (nodeData.inputs?.userMaxResults) defaultParams.userMaxResults = nodeData.inputs.userMaxResults + + return defaultParams + } +} + +module.exports = { nodeClass: Jira_Tools } diff --git a/packages/components/nodes/tools/Jira/core.ts b/packages/components/nodes/tools/Jira/core.ts new file mode 100644 index 000000000..07cb078c5 --- /dev/null +++ b/packages/components/nodes/tools/Jira/core.ts @@ -0,0 +1,1172 @@ +import { z } from 
'zod' +import fetch from 'node-fetch' +import { DynamicStructuredTool } from '../OpenAPIToolkit/core' +import { TOOL_ARGS_PREFIX, formatToolError } from '../../../src/agents' + +export const desc = `Use this when you want to access Jira API for managing issues, comments, and users` + +export interface Headers { + [key: string]: string +} + +export interface Body { + [key: string]: any +} + +export interface RequestParameters { + headers?: Headers + body?: Body + url?: string + description?: string + maxOutputLength?: number + name?: string + actions?: string[] + username?: string + accessToken?: string + jiraHost?: string + defaultParams?: any +} + +// Define schemas for different Jira operations + +// Issue Schemas +const ListIssuesSchema = z.object({ + projectKey: z.string().optional().describe('Project key to filter issues'), + jql: z.string().optional().describe('JQL query for filtering issues'), + maxResults: z.number().optional().default(50).describe('Maximum number of results to return'), + startAt: z.number().optional().default(0).describe('Index of the first result to return') +}) + +const CreateIssueSchema = z.object({ + projectKey: z.string().describe('Project key where the issue will be created'), + issueType: z.string().describe('Type of issue (Bug, Task, Story, etc.)'), + summary: z.string().describe('Issue summary/title'), + description: z.string().optional().describe('Issue description'), + priority: z.string().optional().describe('Issue priority (Highest, High, Medium, Low, Lowest)'), + assigneeAccountId: z.string().optional().describe('Account ID of the assignee'), + labels: z.array(z.string()).optional().describe('Labels to add to the issue') +}) + +const GetIssueSchema = z.object({ + issueKey: z.string().describe('Issue key (e.g., PROJ-123)') +}) + +const UpdateIssueSchema = z.object({ + issueKey: z.string().describe('Issue key (e.g., PROJ-123)'), + summary: z.string().optional().describe('Updated issue summary/title'), + description: 
z.string().optional().describe('Updated issue description'), + priority: z.string().optional().describe('Updated issue priority'), + assigneeAccountId: z.string().optional().describe('Account ID of the new assignee') +}) + +const AssignIssueSchema = z.object({ + issueKey: z.string().describe('Issue key (e.g., PROJ-123)'), + assigneeAccountId: z.string().describe('Account ID of the user to assign') +}) + +const TransitionIssueSchema = z.object({ + issueKey: z.string().describe('Issue key (e.g., PROJ-123)'), + transitionId: z.string().describe('ID of the transition to execute') +}) + +// Comment Schemas +const ListCommentsSchema = z.object({ + issueKey: z.string().describe('Issue key to get comments for'), + maxResults: z.number().optional().default(50).describe('Maximum number of results to return'), + startAt: z.number().optional().default(0).describe('Index of the first result to return') +}) + +const CreateCommentSchema = z.object({ + issueKey: z.string().describe('Issue key to add comment to'), + text: z.string().describe('Comment text content'), + visibility: z + .object({ + type: z.string().optional(), + value: z.string().optional() + }) + .optional() + .describe('Comment visibility settings') +}) + +const GetCommentSchema = z.object({ + issueKey: z.string().describe('Issue key'), + commentId: z.string().describe('Comment ID') +}) + +const UpdateCommentSchema = z.object({ + issueKey: z.string().describe('Issue key'), + commentId: z.string().describe('Comment ID'), + text: z.string().describe('Updated comment text') +}) + +const DeleteCommentSchema = z.object({ + issueKey: z.string().describe('Issue key'), + commentId: z.string().describe('Comment ID to delete') +}) + +// User Schemas +const SearchUsersSchema = z.object({ + query: z.string().describe('Query string for user search'), + maxResults: z.number().optional().default(50).describe('Maximum number of results to return'), + startAt: z.number().optional().default(0).describe('Index of the first result to 
return') +}) + +const GetUserSchema = z.object({ + accountId: z.string().describe('Account ID of the user') +}) + +const CreateUserSchema = z.object({ + emailAddress: z.string().describe('Email address of the user'), + displayName: z.string().describe('Display name of the user'), + username: z.string().optional().describe('Username (deprecated in newer versions)') +}) + +const UpdateUserSchema = z.object({ + accountId: z.string().describe('Account ID of the user'), + emailAddress: z.string().optional().describe('Updated email address'), + displayName: z.string().optional().describe('Updated display name') +}) + +const DeleteUserSchema = z.object({ + accountId: z.string().describe('Account ID of the user to delete') +}) + +class BaseJiraTool extends DynamicStructuredTool { + protected username: string = '' + protected accessToken: string = '' + protected jiraHost: string = '' + + constructor(args: any) { + super(args) + this.username = args.username ?? '' + this.accessToken = args.accessToken ?? '' + this.jiraHost = args.jiraHost ?? '' + } + + async makeJiraRequest({ + endpoint, + method = 'GET', + body, + params + }: { + endpoint: string + method?: string + body?: any + params?: any + }): Promise { + const url = `${this.jiraHost}/rest/api/3/${endpoint}` + const auth = Buffer.from(`${this.username}:${this.accessToken}`).toString('base64') + + const headers = { + Authorization: `Basic ${auth}`, + 'Content-Type': 'application/json', + Accept: 'application/json', + ...this.headers + } + + const response = await fetch(url, { + method, + headers, + body: body ? 
JSON.stringify(body) : undefined + }) + + if (!response.ok) { + const errorText = await response.text() + throw new Error(`Jira API Error ${response.status}: ${response.statusText} - ${errorText}`) + } + + const data = await response.text() + return data + TOOL_ARGS_PREFIX + JSON.stringify(params) + } +} + +// Issue Tools +class ListIssuesTool extends BaseJiraTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'list_issues', + description: 'List issues from Jira using JQL query', + schema: ListIssuesSchema, + baseUrl: '', + method: 'GET', + headers: {} + } + super({ + ...toolInput, + username: args.username, + accessToken: args.accessToken, + jiraHost: args.jiraHost, + maxOutputLength: args.maxOutputLength + }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const queryParams = new URLSearchParams() + + let jql = params.jql || '' + if (params.projectKey && !jql.includes('project')) { + jql = jql ? 
`project = ${params.projectKey} AND (${jql})` : `project = ${params.projectKey}` + } + + if (jql) queryParams.append('jql', jql) + if (params.maxResults) queryParams.append('maxResults', params.maxResults.toString()) + if (params.startAt) queryParams.append('startAt', params.startAt.toString()) + + const endpoint = `search?${queryParams.toString()}` + + try { + const response = await this.makeJiraRequest({ endpoint, params }) + return response + } catch (error) { + return formatToolError(`Error listing issues: ${error}`, params) + } + } +} + +class CreateIssueTool extends BaseJiraTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'create_issue', + description: 'Create a new issue in Jira', + schema: CreateIssueSchema, + baseUrl: '', + method: 'POST', + headers: {} + } + super({ + ...toolInput, + username: args.username, + accessToken: args.accessToken, + jiraHost: args.jiraHost, + maxOutputLength: args.maxOutputLength + }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + + try { + const issueData: any = { + fields: { + project: { + key: params.projectKey + }, + issuetype: { + name: params.issueType + }, + summary: params.summary + } + } + + if (params.description) { + issueData.fields.description = { + type: 'doc', + version: 1, + content: [ + { + type: 'paragraph', + content: [ + { + type: 'text', + text: params.description + } + ] + } + ] + } + } + + if (params.priority) { + issueData.fields.priority = { + name: params.priority + } + } + + if (params.assigneeAccountId) { + issueData.fields.assignee = { + accountId: params.assigneeAccountId + } + } + + if (params.labels) { + issueData.fields.labels = params.labels + } + + const response = await this.makeJiraRequest({ endpoint: 'issue', method: 'POST', body: issueData, params }) + return response + } catch (error) { + return formatToolError(`Error creating issue: ${error}`, params) + } + 
} +} + +class GetIssueTool extends BaseJiraTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'get_issue', + description: 'Get a specific issue from Jira', + schema: GetIssueSchema, + baseUrl: '', + method: 'GET', + headers: {} + } + super({ + ...toolInput, + username: args.username, + accessToken: args.accessToken, + jiraHost: args.jiraHost, + maxOutputLength: args.maxOutputLength + }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + + try { + const endpoint = `issue/${params.issueKey}` + const response = await this.makeJiraRequest({ endpoint, params }) + return response + } catch (error) { + return formatToolError(`Error getting issue: ${error}`, params) + } + } +} + +class UpdateIssueTool extends BaseJiraTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'update_issue', + description: 'Update an existing issue in Jira', + schema: UpdateIssueSchema, + baseUrl: '', + method: 'PUT', + headers: {} + } + super({ + ...toolInput, + username: args.username, + accessToken: args.accessToken, + jiraHost: args.jiraHost, + maxOutputLength: args.maxOutputLength + }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + + try { + const updateData: any = { + fields: {} + } + + if (params.summary) updateData.fields.summary = params.summary + if (params.description) { + updateData.fields.description = { + type: 'doc', + version: 1, + content: [ + { + type: 'paragraph', + content: [ + { + type: 'text', + text: params.description + } + ] + } + ] + } + } + if (params.priority) { + updateData.fields.priority = { + name: params.priority + } + } + if (params.assigneeAccountId) { + updateData.fields.assignee = { + accountId: params.assigneeAccountId + } + } + + const endpoint = `issue/${params.issueKey}` + const response = await 
this.makeJiraRequest({ endpoint, method: 'PUT', body: updateData, params }) + return response || 'Issue updated successfully' + } catch (error) { + return formatToolError(`Error updating issue: ${error}`, params) + } + } +} + +class DeleteIssueTool extends BaseJiraTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'delete_issue', + description: 'Delete an issue from Jira', + schema: GetIssueSchema, + baseUrl: '', + method: 'DELETE', + headers: {} + } + super({ + ...toolInput, + username: args.username, + accessToken: args.accessToken, + jiraHost: args.jiraHost, + maxOutputLength: args.maxOutputLength + }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + + try { + const endpoint = `issue/${params.issueKey}` + const response = await this.makeJiraRequest({ endpoint, method: 'DELETE', params }) + return response || 'Issue deleted successfully' + } catch (error) { + return formatToolError(`Error deleting issue: ${error}`, params) + } + } +} + +class AssignIssueTool extends BaseJiraTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'assign_issue', + description: 'Assign an issue to a user in Jira', + schema: AssignIssueSchema, + baseUrl: '', + method: 'PUT', + headers: {} + } + super({ + ...toolInput, + username: args.username, + accessToken: args.accessToken, + jiraHost: args.jiraHost, + maxOutputLength: args.maxOutputLength + }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + + try { + const assignData = { + accountId: params.assigneeAccountId + } + + const endpoint = `issue/${params.issueKey}/assignee` + const response = await this.makeJiraRequest({ endpoint, method: 'PUT', body: assignData, params }) + return response || 'Issue assigned successfully' + } catch (error) { + return formatToolError(`Error assigning 
issue: ${error}`, params) + } + } +} + +class TransitionIssueTool extends BaseJiraTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'transition_issue', + description: 'Transition an issue to a different status in Jira', + schema: TransitionIssueSchema, + baseUrl: '', + method: 'POST', + headers: {} + } + super({ + ...toolInput, + username: args.username, + accessToken: args.accessToken, + jiraHost: args.jiraHost, + maxOutputLength: args.maxOutputLength + }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + + try { + const transitionData = { + transition: { + id: params.transitionId + } + } + + const endpoint = `issue/${params.issueKey}/transitions` + const response = await this.makeJiraRequest({ endpoint, method: 'POST', body: transitionData, params }) + return response || 'Issue transitioned successfully' + } catch (error) { + return formatToolError(`Error transitioning issue: ${error}`, params) + } + } +} + +// Comment Tools +class ListCommentsTool extends BaseJiraTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'list_comments', + description: 'List comments for a Jira issue', + schema: ListCommentsSchema, + baseUrl: '', + method: 'GET', + headers: {} + } + super({ + ...toolInput, + username: args.username, + accessToken: args.accessToken, + jiraHost: args.jiraHost, + maxOutputLength: args.maxOutputLength + }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const queryParams = new URLSearchParams() + + if (params.maxResults) queryParams.append('maxResults', params.maxResults.toString()) + if (params.startAt) queryParams.append('startAt', params.startAt.toString()) + + const endpoint = `issue/${params.issueKey}/comment?${queryParams.toString()}` + + try { + const response = await this.makeJiraRequest({ 
endpoint, params }) + return response + } catch (error) { + return formatToolError(`Error listing comments: ${error}`, params) + } + } +} + +class CreateCommentTool extends BaseJiraTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'create_comment', + description: 'Create a comment on a Jira issue', + schema: CreateCommentSchema, + baseUrl: '', + method: 'POST', + headers: {} + } + super({ + ...toolInput, + username: args.username, + accessToken: args.accessToken, + jiraHost: args.jiraHost, + maxOutputLength: args.maxOutputLength + }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + + try { + const commentData: any = { + body: { + type: 'doc', + version: 1, + content: [ + { + type: 'paragraph', + content: [ + { + type: 'text', + text: params.text + } + ] + } + ] + } + } + + if (params.visibility) { + commentData.visibility = params.visibility + } + + const endpoint = `issue/${params.issueKey}/comment` + const response = await this.makeJiraRequest({ endpoint, method: 'POST', body: commentData, params }) + return response + } catch (error) { + return formatToolError(`Error creating comment: ${error}`, params) + } + } +} + +class GetCommentTool extends BaseJiraTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'get_comment', + description: 'Get a specific comment from a Jira issue', + schema: GetCommentSchema, + baseUrl: '', + method: 'GET', + headers: {} + } + super({ + ...toolInput, + username: args.username, + accessToken: args.accessToken, + jiraHost: args.jiraHost, + maxOutputLength: args.maxOutputLength + }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + + try { + const endpoint = `issue/${params.issueKey}/comment/${params.commentId}` + const response = await this.makeJiraRequest({ endpoint, params }) + 
return response + } catch (error) { + return formatToolError(`Error getting comment: ${error}`, params) + } + } +} + +class UpdateCommentTool extends BaseJiraTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'update_comment', + description: 'Update a comment on a Jira issue', + schema: UpdateCommentSchema, + baseUrl: '', + method: 'PUT', + headers: {} + } + super({ + ...toolInput, + username: args.username, + accessToken: args.accessToken, + jiraHost: args.jiraHost, + maxOutputLength: args.maxOutputLength + }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + + try { + const commentData = { + body: { + type: 'doc', + version: 1, + content: [ + { + type: 'paragraph', + content: [ + { + type: 'text', + text: params.text + } + ] + } + ] + } + } + + const endpoint = `issue/${params.issueKey}/comment/${params.commentId}` + const response = await this.makeJiraRequest({ endpoint, method: 'PUT', body: commentData, params }) + return response || 'Comment updated successfully' + } catch (error) { + return formatToolError(`Error updating comment: ${error}`, params) + } + } +} + +class DeleteCommentTool extends BaseJiraTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'delete_comment', + description: 'Delete a comment from a Jira issue', + schema: DeleteCommentSchema, + baseUrl: '', + method: 'DELETE', + headers: {} + } + super({ + ...toolInput, + username: args.username, + accessToken: args.accessToken, + jiraHost: args.jiraHost, + maxOutputLength: args.maxOutputLength + }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + + try { + const endpoint = `issue/${params.issueKey}/comment/${params.commentId}` + const response = await this.makeJiraRequest({ endpoint, method: 'DELETE', params }) + return response || 'Comment 
deleted successfully' + } catch (error) { + return formatToolError(`Error deleting comment: ${error}`, params) + } + } +} + +// User Tools +class SearchUsersTool extends BaseJiraTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'search_users', + description: 'Search for users in Jira', + schema: SearchUsersSchema, + baseUrl: '', + method: 'GET', + headers: {} + } + super({ + ...toolInput, + username: args.username, + accessToken: args.accessToken, + jiraHost: args.jiraHost, + maxOutputLength: args.maxOutputLength + }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const queryParams = new URLSearchParams() + + if (params.query) queryParams.append('query', params.query) + if (params.maxResults) queryParams.append('maxResults', params.maxResults.toString()) + if (params.startAt) queryParams.append('startAt', params.startAt.toString()) + + const endpoint = `user/search?${queryParams.toString()}` + + try { + const response = await this.makeJiraRequest({ endpoint, params }) + return response + } catch (error) { + return formatToolError(`Error searching users: ${error}`, params) + } + } +} + +class GetUserTool extends BaseJiraTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'get_user', + description: 'Get a specific user from Jira', + schema: GetUserSchema, + baseUrl: '', + method: 'GET', + headers: {} + } + super({ + ...toolInput, + username: args.username, + accessToken: args.accessToken, + jiraHost: args.jiraHost, + maxOutputLength: args.maxOutputLength + }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const queryParams = new URLSearchParams() + + queryParams.append('accountId', params.accountId) + + const endpoint = `user?${queryParams.toString()}` + + try { + const response = await this.makeJiraRequest({ 
endpoint, params }) + return response + } catch (error) { + return formatToolError(`Error getting user: ${error}`, params) + } + } +} + +class CreateUserTool extends BaseJiraTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'create_user', + description: 'Create a new user in Jira', + schema: CreateUserSchema, + baseUrl: '', + method: 'POST', + headers: {} + } + super({ + ...toolInput, + username: args.username, + accessToken: args.accessToken, + jiraHost: args.jiraHost, + maxOutputLength: args.maxOutputLength + }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + + try { + const userData: any = { + emailAddress: params.emailAddress, + displayName: params.displayName + } + + if (params.username) { + userData.username = params.username + } + + const endpoint = 'user' + const response = await this.makeJiraRequest({ endpoint, method: 'POST', body: userData, params }) + return response + } catch (error) { + return formatToolError(`Error creating user: ${error}`, params) + } + } +} + +class UpdateUserTool extends BaseJiraTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'update_user', + description: 'Update an existing user in Jira', + schema: UpdateUserSchema, + baseUrl: '', + method: 'PUT', + headers: {} + } + super({ + ...toolInput, + username: args.username, + accessToken: args.accessToken, + jiraHost: args.jiraHost, + maxOutputLength: args.maxOutputLength + }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + + try { + const userData: any = {} + + if (params.emailAddress) userData.emailAddress = params.emailAddress + if (params.displayName) userData.displayName = params.displayName + + const queryParams = new URLSearchParams() + queryParams.append('accountId', params.accountId) + + const endpoint = 
`user?${queryParams.toString()}` + const response = await this.makeJiraRequest({ endpoint, method: 'PUT', body: userData, params }) + return response || 'User updated successfully' + } catch (error) { + return formatToolError(`Error updating user: ${error}`, params) + } + } +} + +class DeleteUserTool extends BaseJiraTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'delete_user', + description: 'Delete a user from Jira', + schema: DeleteUserSchema, + baseUrl: '', + method: 'DELETE', + headers: {} + } + super({ + ...toolInput, + username: args.username, + accessToken: args.accessToken, + jiraHost: args.jiraHost, + maxOutputLength: args.maxOutputLength + }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + + try { + const queryParams = new URLSearchParams() + queryParams.append('accountId', params.accountId) + + const endpoint = `user?${queryParams.toString()}` + const response = await this.makeJiraRequest({ endpoint, method: 'DELETE', params }) + return response || 'User deleted successfully' + } catch (error) { + return formatToolError(`Error deleting user: ${error}`, params) + } + } +} + +export const createJiraTools = (args?: RequestParameters): DynamicStructuredTool[] => { + const tools: DynamicStructuredTool[] = [] + const actions = args?.actions || [] + const username = args?.username || '' + const accessToken = args?.accessToken || '' + const jiraHost = args?.jiraHost || '' + const maxOutputLength = args?.maxOutputLength || Infinity + const defaultParams = args?.defaultParams || {} + + // Issue tools + if (actions.includes('listIssues')) { + tools.push( + new ListIssuesTool({ + username, + accessToken, + jiraHost, + maxOutputLength, + defaultParams + }) + ) + } + + if (actions.includes('createIssue')) { + tools.push( + new CreateIssueTool({ + username, + accessToken, + jiraHost, + maxOutputLength, + defaultParams + }) + ) + } + + if 
(actions.includes('getIssue')) { + tools.push( + new GetIssueTool({ + username, + accessToken, + jiraHost, + maxOutputLength, + defaultParams + }) + ) + } + + if (actions.includes('updateIssue')) { + tools.push( + new UpdateIssueTool({ + username, + accessToken, + jiraHost, + maxOutputLength, + defaultParams + }) + ) + } + + if (actions.includes('deleteIssue')) { + tools.push( + new DeleteIssueTool({ + username, + accessToken, + jiraHost, + maxOutputLength, + defaultParams + }) + ) + } + + if (actions.includes('assignIssue')) { + tools.push( + new AssignIssueTool({ + username, + accessToken, + jiraHost, + maxOutputLength, + defaultParams + }) + ) + } + + if (actions.includes('transitionIssue')) { + tools.push( + new TransitionIssueTool({ + username, + accessToken, + jiraHost, + maxOutputLength, + defaultParams + }) + ) + } + + // Comment tools + if (actions.includes('listComments')) { + tools.push( + new ListCommentsTool({ + username, + accessToken, + jiraHost, + maxOutputLength, + defaultParams + }) + ) + } + + if (actions.includes('createComment')) { + tools.push( + new CreateCommentTool({ + username, + accessToken, + jiraHost, + maxOutputLength, + defaultParams + }) + ) + } + + if (actions.includes('getComment')) { + tools.push( + new GetCommentTool({ + username, + accessToken, + jiraHost, + maxOutputLength, + defaultParams + }) + ) + } + + if (actions.includes('updateComment')) { + tools.push( + new UpdateCommentTool({ + username, + accessToken, + jiraHost, + maxOutputLength, + defaultParams + }) + ) + } + + if (actions.includes('deleteComment')) { + tools.push( + new DeleteCommentTool({ + username, + accessToken, + jiraHost, + maxOutputLength, + defaultParams + }) + ) + } + + // User tools + if (actions.includes('searchUsers')) { + tools.push( + new SearchUsersTool({ + username, + accessToken, + jiraHost, + maxOutputLength, + defaultParams + }) + ) + } + + if (actions.includes('getUser')) { + tools.push( + new GetUserTool({ + username, + accessToken, + 
jiraHost, + maxOutputLength, + defaultParams + }) + ) + } + + if (actions.includes('createUser')) { + tools.push( + new CreateUserTool({ + username, + accessToken, + jiraHost, + maxOutputLength, + defaultParams + }) + ) + } + + if (actions.includes('updateUser')) { + tools.push( + new UpdateUserTool({ + username, + accessToken, + jiraHost, + maxOutputLength, + defaultParams + }) + ) + } + + if (actions.includes('deleteUser')) { + tools.push( + new DeleteUserTool({ + username, + accessToken, + jiraHost, + maxOutputLength, + defaultParams + }) + ) + } + + return tools +} diff --git a/packages/components/nodes/tools/Jira/jira.svg b/packages/components/nodes/tools/Jira/jira.svg new file mode 100644 index 000000000..4ace5cc84 --- /dev/null +++ b/packages/components/nodes/tools/Jira/jira.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/packages/components/nodes/tools/MCP/CustomMCP/CustomMCP.ts b/packages/components/nodes/tools/MCP/CustomMCP/CustomMCP.ts index b24144ea5..eb65ecbdf 100644 --- a/packages/components/nodes/tools/MCP/CustomMCP/CustomMCP.ts +++ b/packages/components/nodes/tools/MCP/CustomMCP/CustomMCP.ts @@ -1,12 +1,45 @@ import { Tool } from '@langchain/core/tools' -import { INode, INodeData, INodeOptionsValue, INodeParams } from '../../../../src/Interface' -import { MCPToolkit } from '../core' +import { ICommonObject, IDatabaseEntity, INode, INodeData, INodeOptionsValue, INodeParams } from '../../../../src/Interface' +import { MCPToolkit, validateMCPServerConfig } from '../core' +import { getVars, prepareSandboxVars, parseJsonBody } from '../../../../src/utils' +import { DataSource } from 'typeorm' +import hash from 'object-hash' const mcpServerConfig = `{ "command": "npx", "args": ["-y", "@modelcontextprotocol/server-filesystem", "/path/to/allowed/files"] }` +const howToUseCode = ` +You can use variables in the MCP Server Config with double curly braces \`{{ }}\` and prefix \`$vars.\`. 
+ +For example, you have a variable called "var1": +\`\`\`json +{ + "command": "docker", + "args": [ + "run", + "-i", + "--rm", + "-e", "API_TOKEN" + ], + "env": { + "API_TOKEN": "{{$vars.var1}}" + } +} +\`\`\` + +For example, when using SSE, you can use the variable "var1" in the headers: +\`\`\`json +{ + "url": "https://api.example.com/endpoint/sse", + "headers": { + "Authorization": "Bearer {{$vars.var1}}" + } +} +\`\`\` +` + class Custom_MCP implements INode { label: string name: string @@ -23,7 +56,7 @@ class Custom_MCP implements INode { constructor() { this.label = 'Custom MCP' this.name = 'customMCP' - this.version = 1.0 + this.version = 1.1 this.type = 'Custom MCP Tool' this.icon = 'customMCP.png' this.category = 'Tools (MCP)' @@ -35,7 +68,15 @@ class Custom_MCP implements INode { name: 'mcpServerConfig', type: 'code', hideCodeExecute: true, - placeholder: mcpServerConfig + hint: { + label: 'How to use', + value: howToUseCode + }, + placeholder: mcpServerConfig, + warning: + process.env.CUSTOM_MCP_PROTOCOL === 'sse' + ? 'Only Remote MCP with url is supported. 
Read more here' + : undefined }, { label: 'Available Actions', @@ -50,9 +91,9 @@ class Custom_MCP implements INode { //@ts-ignore loadMethods = { - listActions: async (nodeData: INodeData): Promise => { + listActions: async (nodeData: INodeData, options: ICommonObject): Promise => { try { - const toolset = await this.getTools(nodeData) + const toolset = await this.getTools(nodeData, options) toolset.sort((a: any, b: any) => a.name.localeCompare(b.name)) return toolset.map(({ name, ...rest }) => ({ @@ -72,8 +113,8 @@ class Custom_MCP implements INode { } } - async init(nodeData: INodeData): Promise { - const tools = await this.getTools(nodeData) + async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { + const tools = await this.getTools(nodeData, options) const _mcpActions = nodeData.inputs?.mcpActions let mcpActions = [] @@ -88,25 +129,63 @@ class Custom_MCP implements INode { return tools.filter((tool: any) => mcpActions.includes(tool.name)) } - async getTools(nodeData: INodeData): Promise { + async getTools(nodeData: INodeData, options: ICommonObject): Promise { const mcpServerConfig = nodeData.inputs?.mcpServerConfig as string - if (!mcpServerConfig) { throw new Error('MCP Server Config is required') } + let sandbox: ICommonObject = {} + const workspaceId = options?.searchOptions?.workspaceId?._value || options?.workspaceId + + if (mcpServerConfig.includes('$vars')) { + const appDataSource = options.appDataSource as DataSource + const databaseEntities = options.databaseEntities as IDatabaseEntity + // If options.workspaceId is not set, create a new options object with the workspaceId for getVars. + const optionsWithWorkspaceId = options.workspaceId ? 
options : { ...options, workspaceId } + const variables = await getVars(appDataSource, databaseEntities, nodeData, optionsWithWorkspaceId) + sandbox['$vars'] = prepareSandboxVars(variables) + } + + let canonicalConfig + try { + canonicalConfig = JSON.parse(mcpServerConfig) + } catch (e) { + canonicalConfig = mcpServerConfig + } + + const cacheKey = hash({ workspaceId, canonicalConfig, sandbox }) + + if (options.cachePool) { + const cachedResult = await options.cachePool.getMCPCache(cacheKey) + if (cachedResult) { + return cachedResult.tools + } + } + try { let serverParams if (typeof mcpServerConfig === 'object') { - serverParams = mcpServerConfig + serverParams = substituteVariablesInObject(mcpServerConfig, sandbox) } else if (typeof mcpServerConfig === 'string') { - const serverParamsString = convertToValidJSONString(mcpServerConfig) + const substitutedString = substituteVariablesInString(mcpServerConfig, sandbox) + const serverParamsString = convertToValidJSONString(substitutedString) serverParams = JSON.parse(serverParamsString) } + if (process.env.CUSTOM_MCP_SECURITY_CHECK !== 'false') { + try { + validateMCPServerConfig(serverParams) + } catch (error) { + throw new Error(`Security validation failed: ${error.message}`) + } + } + // Compatible with stdio and SSE let toolkit: MCPToolkit - if (serverParams?.command === undefined) { + if (process.env.CUSTOM_MCP_PROTOCOL === 'sse') { + toolkit = new MCPToolkit(serverParams, 'sse') + } else if (serverParams?.command === undefined) { toolkit = new MCPToolkit(serverParams, 'sse') } else { toolkit = new MCPToolkit(serverParams, 'stdio') @@ -116,6 +195,10 @@ class Custom_MCP implements INode { const tools = toolkit.tools ?? 
[] + if (options.cachePool) { + await options.cachePool.addMCPCache(cacheKey, { toolkit, tools }) + } + return tools as Tool[] } catch (error) { throw new Error(`Invalid MCP Server Config: ${error}`) @@ -123,9 +206,70 @@ class Custom_MCP implements INode { } } +function substituteVariablesInObject(obj: any, sandbox: any): any { + if (typeof obj === 'string') { + // Replace variables in string values + return substituteVariablesInString(obj, sandbox) + } else if (Array.isArray(obj)) { + // Recursively process arrays + return obj.map((item) => substituteVariablesInObject(item, sandbox)) + } else if (obj !== null && typeof obj === 'object') { + // Recursively process object properties + const result: any = {} + for (const [key, value] of Object.entries(obj)) { + result[key] = substituteVariablesInObject(value, sandbox) + } + return result + } + // Return primitive values as-is + return obj +} + +function substituteVariablesInString(str: string, sandbox: any): string { + // Use regex to find {{$variableName.property}} patterns and replace with sandbox values + return str.replace(/\{\{\$([a-zA-Z_][a-zA-Z0-9_]*(?:\.[a-zA-Z_][a-zA-Z0-9_]*)*)\}\}/g, (match, variablePath) => { + try { + // Split the path into parts (e.g., "vars.testvar1" -> ["vars", "testvar1"]) + const pathParts = variablePath.split('.') + + // Start with the sandbox object + let current = sandbox + + // Navigate through the path + for (const part of pathParts) { + // For the first part, check if it exists with $ prefix + if (current === sandbox) { + const sandboxKey = `$${part}` + if (Object.keys(current).includes(sandboxKey)) { + current = current[sandboxKey] + } else { + // If the key doesn't exist, return the original match + return match + } + } else { + // For subsequent parts, access directly + if (current && typeof current === 'object' && part in current) { + current = current[part] + } else { + // If the property doesn't exist, return the original match + return match + } + } + } + + // Return the 
resolved value, converting to string if necessary + return typeof current === 'string' ? current : JSON.stringify(current) + } catch (error) { + // If any error occurs during resolution, return the original match + console.warn(`Error resolving variable ${match}:`, error) + return match + } + }) +} + function convertToValidJSONString(inputString: string) { try { - const jsObject = Function('return ' + inputString)() + const jsObject = parseJsonBody(inputString) return JSON.stringify(jsObject, null, 2) } catch (error) { console.error('Error converting to JSON:', error) diff --git a/packages/components/nodes/tools/MCP/Supergateway/SupergatewayMCP.ts b/packages/components/nodes/tools/MCP/Supergateway/SupergatewayMCP.ts new file mode 100644 index 000000000..1960928e6 --- /dev/null +++ b/packages/components/nodes/tools/MCP/Supergateway/SupergatewayMCP.ts @@ -0,0 +1,126 @@ +import { Tool } from '@langchain/core/tools' +import { ICommonObject, INode, INodeData, INodeOptionsValue, INodeParams } from '../../../../src/Interface' +import { getNodeModulesPackagePath } from '../../../../src/utils' +import { MCPToolkit, validateMCPServerConfig } from '../core' + +class Supergateway_MCP implements INode { + label: string + name: string + version: number + description: string + type: string + icon: string + category: string + baseClasses: string[] + documentation: string + credential: INodeParams + inputs: INodeParams[] + + constructor() { + this.label = 'Supergateway MCP' + this.name = 'supergatewayMCP' + this.version = 1.0 + this.type = 'Supergateway MCP Tool' + this.icon = 'supermachine-logo.png' + this.category = 'Tools (MCP)' + this.description = 'Runs MCP stdio-based servers over SSE (Server-Sent Events) or WebSockets (WS)' + this.documentation = 'https://github.com/supercorp-ai/supergateway' + this.inputs = [ + { + label: 'Arguments', + name: 'arguments', + type: 'string', + rows: 4, + placeholder: '--sse 
"https://mcp-server-ab71a6b2-cd55-49d0-adba-562bc85956e3.supermachine.app"', + description: + 'Arguments to pass to the supergateway server. Refer to the documentation for more information.' + }, + { + label: 'Available Actions', + name: 'mcpActions', + type: 'asyncMultiOptions', + loadMethod: 'listActions', + refresh: true + } + ] + this.baseClasses = ['Tool'] + } + + //@ts-ignore + loadMethods = { + listActions: async (nodeData: INodeData, options: ICommonObject): Promise => { + try { + const toolset = await this.getTools(nodeData, options) + toolset.sort((a: any, b: any) => a.name.localeCompare(b.name)) + + return toolset.map(({ name, ...rest }) => ({ + label: name.toUpperCase(), + name: name, + description: rest.description || name + })) + } catch (error) { + return [ + { + label: 'No Available Actions', + name: 'error', + description: 'No available actions, please check the arguments again and refresh' + } + ] + } + } + } + + async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { + const tools = await this.getTools(nodeData, options) + + const _mcpActions = nodeData.inputs?.mcpActions + let mcpActions = [] + if (_mcpActions) { + try { + mcpActions = typeof _mcpActions === 'string' ? 
JSON.parse(_mcpActions) : _mcpActions + } catch (error) { + console.error('Error parsing mcp actions:', error) + } + } + + return tools.filter((tool: any) => mcpActions.includes(tool.name)) + } + + async getTools(nodeData: INodeData, _: ICommonObject): Promise { + const _args = nodeData.inputs?.arguments as string + const packagePath = getNodeModulesPackagePath('supergateway/dist/index.js') + + const processedArgs = _args + .trim() + .split(/\s+/) + .map((arg) => { + // Remove surrounding double or single quotes if they exist + if ((arg.startsWith('"') && arg.endsWith('"')) || (arg.startsWith("'") && arg.endsWith("'"))) { + return arg.slice(1, -1) + } + return arg + }) + + const serverParams = { + command: 'node', + args: [packagePath, ...processedArgs] + } + + if (process.env.CUSTOM_MCP_SECURITY_CHECK !== 'false') { + try { + validateMCPServerConfig(serverParams) + } catch (error) { + throw new Error(`Security validation failed: ${error.message}`) + } + } + + const toolkit = new MCPToolkit(serverParams, 'stdio') + await toolkit.initialize() + + const tools = toolkit.tools ?? 
[] + + return tools as Tool[] + } +} + +module.exports = { nodeClass: Supergateway_MCP } diff --git a/packages/components/nodes/tools/MCP/Supergateway/supermachine-logo.png b/packages/components/nodes/tools/MCP/Supergateway/supermachine-logo.png new file mode 100644 index 000000000..4ff417e80 Binary files /dev/null and b/packages/components/nodes/tools/MCP/Supergateway/supermachine-logo.png differ diff --git a/packages/components/nodes/tools/MCP/Teradata/TeradataMCP.ts b/packages/components/nodes/tools/MCP/Teradata/TeradataMCP.ts new file mode 100644 index 000000000..3ccb49115 --- /dev/null +++ b/packages/components/nodes/tools/MCP/Teradata/TeradataMCP.ts @@ -0,0 +1,147 @@ +import { Tool } from '@langchain/core/tools' +import { ICommonObject, INode, INodeData, INodeOptionsValue, INodeParams } from '../../../../src/Interface' +import { getCredentialData, getCredentialParam } from '../../../../src/utils' +import { MCPToolkit } from '../core' +import hash from 'object-hash' + +class Teradata_MCP implements INode { + label: string + name: string + version: number + description: string + type: string + icon: string + category: string + baseClasses: string[] + documentation: string + credential: INodeParams + inputs: INodeParams[] + + constructor() { + this.label = 'Teradata MCP' + this.name = 'teradataMCP' + this.version = 1.0 + this.type = 'Teradata MCP Tool' + this.icon = 'teradata.svg' + this.category = 'Tools (MCP)' + this.description = 'MCP Server for Teradata (remote HTTP streamable)' + this.documentation = 'https://github.com/Teradata/teradata-mcp-server' + this.credential = { + label: 'Connect Credential', + name: 'credential', + type: 'credential', + credentialNames: ['teradataTD2Auth', 'teradataBearerToken'], + description: 'Needed when using Teradata MCP server with authentication' + } + this.inputs = [ + { + label: 'MCP Server URL', + name: 'mcpUrl', + type: 'string', + placeholder: 'http://teradata-mcp-server:8001/mcp', + description: 'URL of your Teradata 
MCP server', + optional: false + }, + { + label: 'Bearer Token', + name: 'bearerToken', + type: 'string', + optional: true, + description: 'Optional to override Default set credentials' + }, + { + label: 'Available Actions', + name: 'mcpActions', + type: 'asyncMultiOptions', + loadMethod: 'listActions', + refresh: true + } + ] + this.baseClasses = ['Tool'] + } + + //@ts-ignore + loadMethods = { + listActions: async (nodeData: INodeData, options: ICommonObject): Promise => { + try { + const toolset = await this.getTools(nodeData, options) + toolset.sort((a: any, b: any) => a.name.localeCompare(b.name)) + return toolset.map(({ name, ...rest }) => ({ + label: name.toUpperCase(), + name: name, + description: rest.description || name + })) + } catch (error) { + console.error('Error listing actions:', error) + return [ + { + label: 'No Available Actions', + name: 'error', + description: 'No available actions, please check your MCP server URL and credentials, then refresh.' + } + ] + } + } + } + + async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { + const tools = await this.getTools(nodeData, options) + const _mcpActions = nodeData.inputs?.mcpActions + let mcpActions = [] + if (_mcpActions) { + try { + mcpActions = typeof _mcpActions === 'string' ? JSON.parse(_mcpActions) : _mcpActions + } catch (error) { + console.error('Error parsing mcp actions:', error) + } + } + return tools.filter((tool: any) => mcpActions.includes(tool.name)) + } + + async getTools(nodeData: INodeData, options: ICommonObject): Promise { + const credentialData = await getCredentialData(nodeData.credential ?? 
'', options) + const mcpUrl = nodeData.inputs?.mcpUrl || 'http://teradata-mcp-server:8001/mcp' + if (!mcpUrl) { + throw new Error('Missing MCP Server URL') + } + // Determine auth method from credentials + let serverParams: any = { + url: mcpUrl, + headers: {} + } + // Get Bearer token from node input (from agent flow) or credential store + const bearerToken = nodeData.inputs?.bearerToken || getCredentialParam('token', credentialData, nodeData) + const username = getCredentialParam('tdUsername', credentialData, nodeData) + const password = getCredentialParam('tdPassword', credentialData, nodeData) + + if (bearerToken) { + serverParams.headers['Authorization'] = `Bearer ${bearerToken}` + } else if (username && password) { + serverParams.headers['Authorization'] = 'Basic ' + Buffer.from(`${username}:${password}`).toString('base64') + } else { + throw new Error('Missing credentials: provide Bearer token from flow/credentials OR username/password from credentials') + } + const workspaceId = options?.searchOptions?.workspaceId?._value || options?.workspaceId || 'tdws_default' + let sandbox: ICommonObject = {} + const cacheKey = hash({ workspaceId, serverParams, sandbox }) + if (options.cachePool) { + const cachedResult = await options.cachePool.getMCPCache(cacheKey) + if (cachedResult) { + if (cachedResult.tools.length > 0) { + return cachedResult.tools + } + } + } + + // Use SSE for remote HTTP MCP servers + const toolkit = new MCPToolkit(serverParams, 'sse') + await toolkit.initialize() + const tools = toolkit.tools ?? 
[] + if (options.cachePool) { + await options.cachePool.addMCPCache(cacheKey, { toolkit, tools }) + } + return tools as Tool[] + } +} + +module.exports = { nodeClass: Teradata_MCP } diff --git a/packages/components/nodes/tools/MCP/Teradata/teradata.svg b/packages/components/nodes/tools/MCP/Teradata/teradata.svg new file mode 100644 index 000000000..38c343532 --- /dev/null +++ b/packages/components/nodes/tools/MCP/Teradata/teradata.svg @@ -0,0 +1,19 @@ + + + + + + + + + + + + + + + + + + + diff --git a/packages/components/nodes/tools/MCP/core.ts b/packages/components/nodes/tools/MCP/core.ts index 7c894fcc4..36a266089 100644 --- a/packages/components/nodes/tools/MCP/core.ts +++ b/packages/components/nodes/tools/MCP/core.ts @@ -53,10 +53,29 @@ export class MCPToolkit extends BaseToolkit { const baseUrl = new URL(this.serverParams.url) try { - transport = new StreamableHTTPClientTransport(baseUrl) + if (this.serverParams.headers) { + transport = new StreamableHTTPClientTransport(baseUrl, { + requestInit: { + headers: this.serverParams.headers + } + }) + } else { + transport = new StreamableHTTPClientTransport(baseUrl) + } await client.connect(transport) } catch (error) { - transport = new SSEClientTransport(baseUrl) + if (this.serverParams.headers) { + transport = new SSEClientTransport(baseUrl, { + requestInit: { + headers: this.serverParams.headers + }, + eventSourceInit: { + fetch: (url, init) => fetch(url, { ...init, headers: this.serverParams.headers }) + } + }) + } else { + transport = new SSEClientTransport(baseUrl) + } await client.connect(transport) } } @@ -92,7 +111,13 @@ export class MCPToolkit extends BaseToolkit { argsSchema: createSchemaModel(tool.inputSchema) }) }) - return Promise.all(toolsPromises) + const res = await Promise.allSettled(toolsPromises) + const errors = res.filter((r) => r.status === 'rejected') + if (errors.length !== 0) { + console.error('MCP Tools failed to be resolved', errors) + } + const successes = res.filter((r) => r.status === 
'fulfilled').map((r) => r.value) + return successes } } @@ -113,7 +138,7 @@ export async function MCPTool({ const client = await toolkit.createClient() try { - const req: CallToolRequest = { method: 'tools/call', params: { name: name, arguments: input } } + const req: CallToolRequest = { method: 'tools/call', params: { name: name, arguments: input as any } } const res = await client.request(req, CallToolResultSchema) const content = res.content const contentString = JSON.stringify(content) @@ -148,3 +173,113 @@ function createSchemaModel( return z.object(schemaProperties) } + +export const validateArgsForLocalFileAccess = (args: string[]): void => { + const dangerousPatterns = [ + // Absolute paths + /^\/[^/]/, // Unix absolute paths starting with / + /^[a-zA-Z]:\\/, // Windows absolute paths like C:\ + + // Relative paths that could escape current directory + /\.\.\//, // Parent directory traversal with ../ + /\.\.\\/, // Parent directory traversal with ..\ + /^\.\./, // Starting with .. 
+ + // Local file access patterns + /^\.\//, // Current directory with ./ + /^~\//, // Home directory with ~/ + /^file:\/\//, // File protocol + + // Common file extensions that shouldn't be accessed + /\.(exe|bat|cmd|sh|ps1|vbs|scr|com|pif|dll|sys)$/i, + + // File flags and options that could access local files + /^--?(?:file|input|output|config|load|save|import|export|read|write)=/i, + /^--?(?:file|input|output|config|load|save|import|export|read|write)$/i + ] + + for (const arg of args) { + if (typeof arg !== 'string') continue + + // Check for dangerous patterns + for (const pattern of dangerousPatterns) { + if (pattern.test(arg)) { + throw new Error(`Argument contains potential local file access: "${arg}"`) + } + } + + // Check for null bytes + if (arg.includes('\0')) { + throw new Error(`Argument contains null byte: "${arg}"`) + } + + // Check for very long paths that might be used for buffer overflow attacks + if (arg.length > 1000) { + throw new Error(`Argument is suspiciously long (${arg.length} characters): "${arg.substring(0, 100)}..."`) + } + } +} + +export const validateCommandInjection = (args: string[]): void => { + const dangerousPatterns = [ + // Shell metacharacters + /[;&|`$(){}[\]<>]/, + // Command chaining + /&&|\|\||;;/, + // Redirections + />>|<<|>/, + // Backticks and command substitution + /`|\$\(/, + // Process substitution + /<\(|>\(/ + ] + + for (const arg of args) { + if (typeof arg !== 'string') continue + + for (const pattern of dangerousPatterns) { + if (pattern.test(arg)) { + throw new Error(`Argument contains potentially dangerous characters: "${arg}"`) + } + } + } +} + +export const validateEnvironmentVariables = (env: Record): void => { + const dangerousEnvVars = ['PATH', 'LD_LIBRARY_PATH', 'DYLD_LIBRARY_PATH'] + + for (const [key, value] of Object.entries(env)) { + if (dangerousEnvVars.includes(key)) { + throw new Error(`Environment variable '${key}' modification is not allowed`) + } + + if (typeof value === 'string' && 
value.includes('\0')) { + throw new Error(`Environment variable '${key}' contains null byte`) + } + } +} + +export const validateMCPServerConfig = (serverParams: any): void => { + // Validate the entire server configuration + if (!serverParams || typeof serverParams !== 'object') { + throw new Error('Invalid server configuration') + } + + // Command allowlist - only allow specific safe commands + const allowedCommands = ['node', 'npx', 'python', 'python3', 'docker'] + + if (serverParams.command && !allowedCommands.includes(serverParams.command)) { + throw new Error(`Command '${serverParams.command}' is not allowed. Allowed commands: ${allowedCommands.join(', ')}`) + } + + // Validate arguments if present + if (serverParams.args && Array.isArray(serverParams.args)) { + validateArgsForLocalFileAccess(serverParams.args) + validateCommandInjection(serverParams.args) + } + + // Validate environment variables + if (serverParams.env) { + validateEnvironmentVariables(serverParams.env) + } +} diff --git a/packages/components/nodes/tools/MicrosoftOutlook/MicrosoftOutlook.ts b/packages/components/nodes/tools/MicrosoftOutlook/MicrosoftOutlook.ts new file mode 100644 index 000000000..a85af7221 --- /dev/null +++ b/packages/components/nodes/tools/MicrosoftOutlook/MicrosoftOutlook.ts @@ -0,0 +1,822 @@ +import { convertMultiOptionsToStringArray, getCredentialData, getCredentialParam, refreshOAuth2Token } from '../../../src/utils' +import { createOutlookTools } from './core' +import type { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface' + +class MicrosoftOutlook_Tools implements INode { + label: string + name: string + version: number + type: string + icon: string + category: string + description: string + baseClasses: string[] + credential: INodeParams + inputs: INodeParams[] + + constructor() { + this.label = 'Microsoft Outlook' + this.name = 'microsoftOutlook' + this.version = 1.0 + this.type = 'MicrosoftOutlook' + this.icon = 'outlook.svg' + 
this.category = 'Tools' + this.description = 'Perform Microsoft Outlook operations for calendars, events, and messages' + this.baseClasses = [this.type, 'Tool'] + this.credential = { + label: 'Connect Credential', + name: 'credential', + type: 'credential', + credentialNames: ['microsoftOutlookOAuth2'] + } + this.inputs = [ + { + label: 'Type', + name: 'outlookType', + type: 'options', + options: [ + { + label: 'Calendar', + name: 'calendar' + }, + { + label: 'Message', + name: 'message' + } + ] + }, + // Calendar Actions + { + label: 'Calendar Actions', + name: 'calendarActions', + type: 'multiOptions', + options: [ + { + label: 'List Calendars', + name: 'listCalendars' + }, + { + label: 'Get Calendar', + name: 'getCalendar' + }, + { + label: 'Create Calendar', + name: 'createCalendar' + }, + { + label: 'Update Calendar', + name: 'updateCalendar' + }, + { + label: 'Delete Calendar', + name: 'deleteCalendar' + }, + { + label: 'List Events', + name: 'listEvents' + }, + { + label: 'Get Event', + name: 'getEvent' + }, + { + label: 'Create Event', + name: 'createEvent' + }, + { + label: 'Update Event', + name: 'updateEvent' + }, + { + label: 'Delete Event', + name: 'deleteEvent' + } + ], + show: { + outlookType: ['calendar'] + } + }, + // Message Actions + { + label: 'Message Actions', + name: 'messageActions', + type: 'multiOptions', + options: [ + { + label: 'List Messages', + name: 'listMessages' + }, + { + label: 'Get Message', + name: 'getMessage' + }, + { + label: 'Create Draft Message', + name: 'createDraftMessage' + }, + { + label: 'Send Message', + name: 'sendMessage' + }, + { + label: 'Update Message', + name: 'updateMessage' + }, + { + label: 'Delete Message', + name: 'deleteMessage' + }, + { + label: 'Copy Message', + name: 'copyMessage' + }, + { + label: 'Move Message', + name: 'moveMessage' + }, + { + label: 'Reply to Message', + name: 'replyMessage' + }, + { + label: 'Forward Message', + name: 'forwardMessage' + } + ], + show: { + outlookType: 
['message'] + } + }, + // CALENDAR PARAMETERS + // List Calendars Parameters + { + label: 'Max Results [List Calendars]', + name: 'maxResultsListCalendars', + type: 'number', + description: 'Maximum number of calendars to return', + default: 50, + show: { + outlookType: ['calendar'], + calendarActions: ['listCalendars'] + }, + additionalParams: true, + optional: true + }, + // Get Calendar Parameters + { + label: 'Calendar ID [Get Calendar]', + name: 'calendarIdGetCalendar', + type: 'string', + description: 'ID of the calendar to retrieve', + show: { + outlookType: ['calendar'], + calendarActions: ['getCalendar'] + }, + additionalParams: true, + optional: true + }, + // Create Calendar Parameters + { + label: 'Calendar Name [Create Calendar]', + name: 'calendarNameCreateCalendar', + type: 'string', + description: 'Name of the calendar', + placeholder: 'My New Calendar', + show: { + outlookType: ['calendar'], + calendarActions: ['createCalendar'] + }, + additionalParams: true, + optional: true + }, + // Update Calendar Parameters + { + label: 'Calendar ID [Update Calendar]', + name: 'calendarIdUpdateCalendar', + type: 'string', + description: 'ID of the calendar to update', + show: { + outlookType: ['calendar'], + calendarActions: ['updateCalendar'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Calendar Name [Update Calendar]', + name: 'calendarNameUpdateCalendar', + type: 'string', + description: 'New name of the calendar', + show: { + outlookType: ['calendar'], + calendarActions: ['updateCalendar'] + }, + additionalParams: true, + optional: true + }, + // Delete Calendar Parameters + { + label: 'Calendar ID [Delete Calendar]', + name: 'calendarIdDeleteCalendar', + type: 'string', + description: 'ID of the calendar to delete', + show: { + outlookType: ['calendar'], + calendarActions: ['deleteCalendar'] + }, + additionalParams: true, + optional: true + }, + // List Events Parameters + { + label: 'Calendar ID [List Events]', + name: 
'calendarIdListEvents', + type: 'string', + description: 'ID of the calendar (leave empty for primary calendar)', + show: { + outlookType: ['calendar'], + calendarActions: ['listEvents'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Max Results [List Events]', + name: 'maxResultsListEvents', + type: 'number', + description: 'Maximum number of events to return', + default: 50, + show: { + outlookType: ['calendar'], + calendarActions: ['listEvents'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Start Date Time [List Events]', + name: 'startDateTimeListEvents', + type: 'string', + description: 'Start date time filter in ISO format', + placeholder: '2024-01-01T00:00:00Z', + show: { + outlookType: ['calendar'], + calendarActions: ['listEvents'] + }, + additionalParams: true, + optional: true + }, + { + label: 'End Date Time [List Events]', + name: 'endDateTimeListEvents', + type: 'string', + description: 'End date time filter in ISO format', + placeholder: '2024-12-31T23:59:59Z', + show: { + outlookType: ['calendar'], + calendarActions: ['listEvents'] + }, + additionalParams: true, + optional: true + }, + // Get Event Parameters + { + label: 'Event ID [Get Event]', + name: 'eventIdGetEvent', + type: 'string', + description: 'ID of the event to retrieve', + show: { + outlookType: ['calendar'], + calendarActions: ['getEvent'] + }, + additionalParams: true, + optional: true + }, + // Create Event Parameters + { + label: 'Subject [Create Event]', + name: 'subjectCreateEvent', + type: 'string', + description: 'Subject/title of the event', + placeholder: 'Meeting Title', + show: { + outlookType: ['calendar'], + calendarActions: ['createEvent'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Body [Create Event]', + name: 'bodyCreateEvent', + type: 'string', + description: 'Body/description of the event', + placeholder: 'Meeting description', + rows: 3, + show: { + outlookType: ['calendar'], + calendarActions: 
['createEvent'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Start Date Time [Create Event]', + name: 'startDateTimeCreateEvent', + type: 'string', + description: 'Start date and time in ISO format', + placeholder: '2024-01-15T10:00:00', + show: { + outlookType: ['calendar'], + calendarActions: ['createEvent'] + }, + additionalParams: true, + optional: true + }, + { + label: 'End Date Time [Create Event]', + name: 'endDateTimeCreateEvent', + type: 'string', + description: 'End date and time in ISO format', + placeholder: '2024-01-15T11:00:00', + show: { + outlookType: ['calendar'], + calendarActions: ['createEvent'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Time Zone [Create Event]', + name: 'timeZoneCreateEvent', + type: 'string', + description: 'Time zone for the event', + placeholder: 'UTC', + default: 'UTC', + show: { + outlookType: ['calendar'], + calendarActions: ['createEvent'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Location [Create Event]', + name: 'locationCreateEvent', + type: 'string', + description: 'Location of the event', + placeholder: 'Conference Room A', + show: { + outlookType: ['calendar'], + calendarActions: ['createEvent'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Attendees [Create Event]', + name: 'attendeesCreateEvent', + type: 'string', + description: 'Comma-separated list of attendee email addresses', + placeholder: 'user1@example.com,user2@example.com', + show: { + outlookType: ['calendar'], + calendarActions: ['createEvent'] + }, + additionalParams: true, + optional: true + }, + // Update Event Parameters + { + label: 'Event ID [Update Event]', + name: 'eventIdUpdateEvent', + type: 'string', + description: 'ID of the event to update', + show: { + outlookType: ['calendar'], + calendarActions: ['updateEvent'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Subject [Update Event]', + name: 'subjectUpdateEvent', + type: 
'string', + description: 'New subject/title of the event', + show: { + outlookType: ['calendar'], + calendarActions: ['updateEvent'] + }, + additionalParams: true, + optional: true + }, + // Delete Event Parameters + { + label: 'Event ID [Delete Event]', + name: 'eventIdDeleteEvent', + type: 'string', + description: 'ID of the event to delete', + show: { + outlookType: ['calendar'], + calendarActions: ['deleteEvent'] + }, + additionalParams: true, + optional: true + }, + // MESSAGE PARAMETERS + // List Messages Parameters + { + label: 'Max Results [List Messages]', + name: 'maxResultsListMessages', + type: 'number', + description: 'Maximum number of messages to return', + default: 50, + show: { + outlookType: ['message'], + messageActions: ['listMessages'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Filter [List Messages]', + name: 'filterListMessages', + type: 'string', + description: 'Filter query (e.g., "isRead eq false")', + show: { + outlookType: ['message'], + messageActions: ['listMessages'] + }, + additionalParams: true, + optional: true + }, + // Get Message Parameters + { + label: 'Message ID [Get Message]', + name: 'messageIdGetMessage', + type: 'string', + description: 'ID of the message to retrieve', + show: { + outlookType: ['message'], + messageActions: ['getMessage'] + }, + additionalParams: true, + optional: true + }, + // Create Draft Message Parameters + { + label: 'To [Create Draft Message]', + name: 'toCreateDraftMessage', + type: 'string', + description: 'Recipient email address(es), comma-separated', + placeholder: 'user@example.com', + show: { + outlookType: ['message'], + messageActions: ['createDraftMessage'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Subject [Create Draft Message]', + name: 'subjectCreateDraftMessage', + type: 'string', + description: 'Subject of the message', + placeholder: 'Email Subject', + show: { + outlookType: ['message'], + messageActions: ['createDraftMessage'] + }, + 
additionalParams: true, + optional: true + }, + { + label: 'Body [Create Draft Message]', + name: 'bodyCreateDraftMessage', + type: 'string', + description: 'Body content of the message', + placeholder: 'Email body content', + rows: 4, + show: { + outlookType: ['message'], + messageActions: ['createDraftMessage'] + }, + additionalParams: true, + optional: true + }, + { + label: 'CC [Create Draft Message]', + name: 'ccCreateDraftMessage', + type: 'string', + description: 'CC email address(es), comma-separated', + placeholder: 'cc@example.com', + show: { + outlookType: ['message'], + messageActions: ['createDraftMessage'] + }, + additionalParams: true, + optional: true + }, + { + label: 'BCC [Create Draft Message]', + name: 'bccCreateDraftMessage', + type: 'string', + description: 'BCC email address(es), comma-separated', + placeholder: 'bcc@example.com', + show: { + outlookType: ['message'], + messageActions: ['createDraftMessage'] + }, + additionalParams: true, + optional: true + }, + // Send Message Parameters + { + label: 'To [Send Message]', + name: 'toSendMessage', + type: 'string', + description: 'Recipient email address(es), comma-separated', + placeholder: 'user@example.com', + show: { + outlookType: ['message'], + messageActions: ['sendMessage'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Subject [Send Message]', + name: 'subjectSendMessage', + type: 'string', + description: 'Subject of the message', + placeholder: 'Email Subject', + show: { + outlookType: ['message'], + messageActions: ['sendMessage'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Body [Send Message]', + name: 'bodySendMessage', + type: 'string', + description: 'Body content of the message', + placeholder: 'Email body content', + rows: 4, + show: { + outlookType: ['message'], + messageActions: ['sendMessage'] + }, + additionalParams: true, + optional: true + }, + // Update Message Parameters + { + label: 'Message ID [Update Message]', + name: 
'messageIdUpdateMessage', + type: 'string', + description: 'ID of the message to update', + show: { + outlookType: ['message'], + messageActions: ['updateMessage'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Is Read [Update Message]', + name: 'isReadUpdateMessage', + type: 'boolean', + description: 'Mark message as read/unread', + show: { + outlookType: ['message'], + messageActions: ['updateMessage'] + }, + additionalParams: true, + optional: true + }, + // Delete Message Parameters + { + label: 'Message ID [Delete Message]', + name: 'messageIdDeleteMessage', + type: 'string', + description: 'ID of the message to delete', + show: { + outlookType: ['message'], + messageActions: ['deleteMessage'] + }, + additionalParams: true, + optional: true + }, + // Copy Message Parameters + { + label: 'Message ID [Copy Message]', + name: 'messageIdCopyMessage', + type: 'string', + description: 'ID of the message to copy', + show: { + outlookType: ['message'], + messageActions: ['copyMessage'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Destination Folder ID [Copy Message]', + name: 'destinationFolderIdCopyMessage', + type: 'string', + description: 'ID of the destination folder', + show: { + outlookType: ['message'], + messageActions: ['copyMessage'] + }, + additionalParams: true, + optional: true + }, + // Move Message Parameters + { + label: 'Message ID [Move Message]', + name: 'messageIdMoveMessage', + type: 'string', + description: 'ID of the message to move', + show: { + outlookType: ['message'], + messageActions: ['moveMessage'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Destination Folder ID [Move Message]', + name: 'destinationFolderIdMoveMessage', + type: 'string', + description: 'ID of the destination folder', + show: { + outlookType: ['message'], + messageActions: ['moveMessage'] + }, + additionalParams: true, + optional: true + }, + // Reply Message Parameters + { + label: 'Message ID [Reply 
Message]', + name: 'messageIdReplyMessage', + type: 'string', + description: 'ID of the message to reply to', + show: { + outlookType: ['message'], + messageActions: ['replyMessage'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Reply Body [Reply Message]', + name: 'replyBodyReplyMessage', + type: 'string', + description: 'Reply message body', + rows: 4, + show: { + outlookType: ['message'], + messageActions: ['replyMessage'] + }, + additionalParams: true, + optional: true + }, + // Forward Message Parameters + { + label: 'Message ID [Forward Message]', + name: 'messageIdForwardMessage', + type: 'string', + description: 'ID of the message to forward', + show: { + outlookType: ['message'], + messageActions: ['forwardMessage'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Forward To [Forward Message]', + name: 'forwardToForwardMessage', + type: 'string', + description: 'Email address(es) to forward to, comma-separated', + show: { + outlookType: ['message'], + messageActions: ['forwardMessage'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Forward Comment [Forward Message]', + name: 'forwardCommentForwardMessage', + type: 'string', + description: 'Additional comment to include with forward', + rows: 2, + show: { + outlookType: ['message'], + messageActions: ['forwardMessage'] + }, + additionalParams: true, + optional: true + } + ] + } + + async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { + const outlookType = nodeData.inputs?.outlookType as string + const calendarActions = nodeData.inputs?.calendarActions as string + const messageActions = nodeData.inputs?.messageActions as string + + let credentialData = await getCredentialData(nodeData.credential ?? '', options) + credentialData = await refreshOAuth2Token(nodeData.credential ?? 
'', credentialData, options) + const accessToken = getCredentialParam('access_token', credentialData, nodeData) + + if (!accessToken) { + throw new Error('No access token found in credential') + } + + let actions: string[] = [] + if (outlookType === 'calendar') { + actions = convertMultiOptionsToStringArray(calendarActions) + } else if (outlookType === 'message') { + actions = convertMultiOptionsToStringArray(messageActions) + } + + const defaultParams = this.transformNodeInputsToToolArgs(nodeData) + + const outlookTools = createOutlookTools({ + accessToken, + actions, + defaultParams + }) + + return outlookTools + } + + transformNodeInputsToToolArgs(nodeData: INodeData): Record { + // Collect default parameters from inputs + const defaultParams: Record = {} + + // Calendar parameters + if (nodeData.inputs?.maxResultsListCalendars) defaultParams.maxResultsListCalendars = nodeData.inputs.maxResultsListCalendars + if (nodeData.inputs?.calendarIdGetCalendar) defaultParams.calendarIdGetCalendar = nodeData.inputs.calendarIdGetCalendar + if (nodeData.inputs?.calendarNameCreateCalendar) + defaultParams.calendarNameCreateCalendar = nodeData.inputs.calendarNameCreateCalendar + if (nodeData.inputs?.calendarIdUpdateCalendar) defaultParams.calendarIdUpdateCalendar = nodeData.inputs.calendarIdUpdateCalendar + if (nodeData.inputs?.calendarNameUpdateCalendar) + defaultParams.calendarNameUpdateCalendar = nodeData.inputs.calendarNameUpdateCalendar + if (nodeData.inputs?.calendarIdDeleteCalendar) defaultParams.calendarIdDeleteCalendar = nodeData.inputs.calendarIdDeleteCalendar + if (nodeData.inputs?.calendarIdListEvents) defaultParams.calendarIdListEvents = nodeData.inputs.calendarIdListEvents + if (nodeData.inputs?.maxResultsListEvents) defaultParams.maxResultsListEvents = nodeData.inputs.maxResultsListEvents + if (nodeData.inputs?.startDateTimeListEvents) defaultParams.startDateTimeListEvents = nodeData.inputs.startDateTimeListEvents + if (nodeData.inputs?.endDateTimeListEvents) 
defaultParams.endDateTimeListEvents = nodeData.inputs.endDateTimeListEvents + if (nodeData.inputs?.eventIdGetEvent) defaultParams.eventIdGetEvent = nodeData.inputs.eventIdGetEvent + if (nodeData.inputs?.subjectCreateEvent) defaultParams.subjectCreateEvent = nodeData.inputs.subjectCreateEvent + if (nodeData.inputs?.bodyCreateEvent) defaultParams.bodyCreateEvent = nodeData.inputs.bodyCreateEvent + if (nodeData.inputs?.startDateTimeCreateEvent) defaultParams.startDateTimeCreateEvent = nodeData.inputs.startDateTimeCreateEvent + if (nodeData.inputs?.endDateTimeCreateEvent) defaultParams.endDateTimeCreateEvent = nodeData.inputs.endDateTimeCreateEvent + if (nodeData.inputs?.timeZoneCreateEvent) defaultParams.timeZoneCreateEvent = nodeData.inputs.timeZoneCreateEvent + if (nodeData.inputs?.locationCreateEvent) defaultParams.locationCreateEvent = nodeData.inputs.locationCreateEvent + if (nodeData.inputs?.attendeesCreateEvent) defaultParams.attendeesCreateEvent = nodeData.inputs.attendeesCreateEvent + if (nodeData.inputs?.eventIdUpdateEvent) defaultParams.eventIdUpdateEvent = nodeData.inputs.eventIdUpdateEvent + if (nodeData.inputs?.subjectUpdateEvent) defaultParams.subjectUpdateEvent = nodeData.inputs.subjectUpdateEvent + if (nodeData.inputs?.eventIdDeleteEvent) defaultParams.eventIdDeleteEvent = nodeData.inputs.eventIdDeleteEvent + + // Message parameters + if (nodeData.inputs?.maxResultsListMessages) defaultParams.maxResultsListMessages = nodeData.inputs.maxResultsListMessages + if (nodeData.inputs?.filterListMessages) defaultParams.filterListMessages = nodeData.inputs.filterListMessages + if (nodeData.inputs?.messageIdGetMessage) defaultParams.messageIdGetMessage = nodeData.inputs.messageIdGetMessage + if (nodeData.inputs?.toCreateDraftMessage) defaultParams.toCreateDraftMessage = nodeData.inputs.toCreateDraftMessage + if (nodeData.inputs?.subjectCreateDraftMessage) defaultParams.subjectCreateDraftMessage = nodeData.inputs.subjectCreateDraftMessage + if 
(nodeData.inputs?.bodyCreateDraftMessage) defaultParams.bodyCreateDraftMessage = nodeData.inputs.bodyCreateDraftMessage + if (nodeData.inputs?.ccCreateDraftMessage) defaultParams.ccCreateDraftMessage = nodeData.inputs.ccCreateDraftMessage + if (nodeData.inputs?.bccCreateDraftMessage) defaultParams.bccCreateDraftMessage = nodeData.inputs.bccCreateDraftMessage + if (nodeData.inputs?.toSendMessage) defaultParams.toSendMessage = nodeData.inputs.toSendMessage + if (nodeData.inputs?.subjectSendMessage) defaultParams.subjectSendMessage = nodeData.inputs.subjectSendMessage + if (nodeData.inputs?.bodySendMessage) defaultParams.bodySendMessage = nodeData.inputs.bodySendMessage + if (nodeData.inputs?.messageIdUpdateMessage) defaultParams.messageIdUpdateMessage = nodeData.inputs.messageIdUpdateMessage + if (nodeData.inputs?.isReadUpdateMessage !== undefined) defaultParams.isReadUpdateMessage = nodeData.inputs.isReadUpdateMessage + if (nodeData.inputs?.messageIdDeleteMessage) defaultParams.messageIdDeleteMessage = nodeData.inputs.messageIdDeleteMessage + if (nodeData.inputs?.messageIdCopyMessage) defaultParams.messageIdCopyMessage = nodeData.inputs.messageIdCopyMessage + if (nodeData.inputs?.destinationFolderIdCopyMessage) + defaultParams.destinationFolderIdCopyMessage = nodeData.inputs.destinationFolderIdCopyMessage + if (nodeData.inputs?.messageIdMoveMessage) defaultParams.messageIdMoveMessage = nodeData.inputs.messageIdMoveMessage + if (nodeData.inputs?.destinationFolderIdMoveMessage) + defaultParams.destinationFolderIdMoveMessage = nodeData.inputs.destinationFolderIdMoveMessage + if (nodeData.inputs?.messageIdReplyMessage) defaultParams.messageIdReplyMessage = nodeData.inputs.messageIdReplyMessage + if (nodeData.inputs?.replyBodyReplyMessage) defaultParams.replyBodyReplyMessage = nodeData.inputs.replyBodyReplyMessage + if (nodeData.inputs?.messageIdForwardMessage) defaultParams.messageIdForwardMessage = nodeData.inputs.messageIdForwardMessage + if 
(nodeData.inputs?.forwardToForwardMessage) defaultParams.forwardToForwardMessage = nodeData.inputs.forwardToForwardMessage + if (nodeData.inputs?.forwardCommentForwardMessage) + defaultParams.forwardCommentForwardMessage = nodeData.inputs.forwardCommentForwardMessage + + return defaultParams + } +} + +module.exports = { nodeClass: MicrosoftOutlook_Tools } diff --git a/packages/components/nodes/tools/MicrosoftOutlook/core.ts b/packages/components/nodes/tools/MicrosoftOutlook/core.ts new file mode 100644 index 000000000..0468da632 --- /dev/null +++ b/packages/components/nodes/tools/MicrosoftOutlook/core.ts @@ -0,0 +1,969 @@ +import { z } from 'zod' +import fetch from 'node-fetch' +import { DynamicStructuredTool } from '../OpenAPIToolkit/core' +import { TOOL_ARGS_PREFIX, formatToolError } from '../../../src/agents' + +export const desc = `Use this when you want to access Microsoft Outlook API for managing calendars, events, and messages` + +export interface Headers { + [key: string]: string +} + +export interface Body { + [key: string]: any +} + +export interface RequestParameters { + headers?: Headers + body?: Body + url?: string + description?: string + name?: string + actions?: string[] + accessToken?: string + defaultParams?: any +} + +// Define schemas for different Outlook operations + +// Calendar Schemas +const ListCalendarsSchema = z.object({ + maxResults: z.number().optional().default(50).describe('Maximum number of calendars to return') +}) + +const GetCalendarSchema = z.object({ + calendarId: z.string().describe('ID of the calendar to retrieve') +}) + +const CreateCalendarSchema = z.object({ + calendarName: z.string().describe('Name of the calendar') +}) + +const UpdateCalendarSchema = z.object({ + calendarId: z.string().describe('ID of the calendar to update'), + calendarName: z.string().describe('New name of the calendar') +}) + +const DeleteCalendarSchema = z.object({ + calendarId: z.string().describe('ID of the calendar to delete') +}) + +const 
ListEventsSchema = z.object({ + calendarId: z.string().optional().describe('ID of the calendar (empty for primary calendar)'), + maxResults: z.number().optional().default(50).describe('Maximum number of events to return'), + startDateTime: z.string().optional().describe('Start date time filter in ISO format'), + endDateTime: z.string().optional().describe('End date time filter in ISO format') +}) + +const GetEventSchema = z.object({ + eventId: z.string().describe('ID of the event to retrieve') +}) + +const CreateEventSchema = z.object({ + subject: z.string().describe('Subject/title of the event'), + body: z.string().optional().describe('Body/description of the event'), + startDateTime: z.string().describe('Start date and time in ISO format'), + endDateTime: z.string().describe('End date and time in ISO format'), + timeZone: z.string().optional().default('UTC').describe('Time zone for the event'), + location: z.string().optional().describe('Location of the event'), + attendees: z.string().optional().describe('Comma-separated list of attendee email addresses') +}) + +const UpdateEventSchema = z.object({ + eventId: z.string().describe('ID of the event to update'), + subject: z.string().optional().describe('New subject/title of the event') +}) + +const DeleteEventSchema = z.object({ + eventId: z.string().describe('ID of the event to delete') +}) + +// Message Schemas +const ListMessagesSchema = z.object({ + maxResults: z.number().optional().default(50).describe('Maximum number of messages to return'), + filter: z.string().optional().describe('Filter query (e.g., "isRead eq false")') +}) + +const GetMessageSchema = z.object({ + messageId: z.string().describe('ID of the message to retrieve') +}) + +const CreateDraftMessageSchema = z.object({ + to: z.string().describe('Recipient email address(es), comma-separated'), + subject: z.string().optional().describe('Subject of the message'), + body: z.string().optional().describe('Body content of the message'), + cc: 
z.string().optional().describe('CC email address(es), comma-separated'), + bcc: z.string().optional().describe('BCC email address(es), comma-separated') +}) + +const SendMessageSchema = z.object({ + to: z.string().describe('Recipient email address(es), comma-separated'), + subject: z.string().optional().describe('Subject of the message'), + body: z.string().optional().describe('Body content of the message') +}) + +const UpdateMessageSchema = z.object({ + messageId: z.string().describe('ID of the message to update'), + isRead: z.boolean().optional().describe('Mark message as read/unread') +}) + +const DeleteMessageSchema = z.object({ + messageId: z.string().describe('ID of the message to delete') +}) + +const CopyMessageSchema = z.object({ + messageId: z.string().describe('ID of the message to copy'), + destinationFolderId: z.string().describe('ID of the destination folder') +}) + +const MoveMessageSchema = z.object({ + messageId: z.string().describe('ID of the message to move'), + destinationFolderId: z.string().describe('ID of the destination folder') +}) + +const ReplyMessageSchema = z.object({ + messageId: z.string().describe('ID of the message to reply to'), + replyBody: z.string().describe('Reply message body') +}) + +const ForwardMessageSchema = z.object({ + messageId: z.string().describe('ID of the message to forward'), + forwardTo: z.string().describe('Email address(es) to forward to, comma-separated'), + forwardComment: z.string().optional().describe('Additional comment to include with forward') +}) + +class BaseOutlookTool extends DynamicStructuredTool { + protected accessToken: string = '' + + constructor(args: any) { + super(args) + this.accessToken = args.accessToken ?? 
'' + } + + async makeGraphRequest(url: string, method: string = 'GET', body?: any, params?: any): Promise { + const headers = { + Authorization: `Bearer ${this.accessToken}`, + 'Content-Type': 'application/json', + ...this.headers + } + + const response = await fetch(url, { + method, + headers, + body: body ? JSON.stringify(body) : undefined + }) + + if (!response.ok) { + const errorText = await response.text() + throw new Error(`Graph API Error ${response.status}: ${response.statusText} - ${errorText}`) + } + + const data = await response.text() + return data + TOOL_ARGS_PREFIX + JSON.stringify(params) + } + + parseEmailAddresses(emailString: string) { + return emailString.split(',').map((email) => ({ + emailAddress: { + address: email.trim(), + name: email.trim() + } + })) + } +} + +// Calendar Tools +class ListCalendarsTool extends BaseOutlookTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'list_calendars', + description: 'List calendars in Microsoft Outlook', + schema: ListCalendarsSchema, + baseUrl: 'https://graph.microsoft.com/v1.0/me/calendars', + method: 'GET', + headers: {} + } + super({ ...toolInput, accessToken: args.accessToken }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const queryParams = new URLSearchParams() + + if (params.maxResults) queryParams.append('$top', params.maxResults.toString()) + + const url = `https://graph.microsoft.com/v1.0/me/calendars?${queryParams.toString()}` + + try { + const response = await this.makeGraphRequest(url, 'GET', undefined, params) + return response + } catch (error) { + return formatToolError(`Error listing calendars: ${error}`, {}) + } + } +} + +class GetCalendarTool extends BaseOutlookTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'get_calendar', + description: 'Get a specific calendar by ID from Microsoft Outlook', + schema: 
GetCalendarSchema, + baseUrl: 'https://graph.microsoft.com/v1.0/me/calendars', + method: 'GET', + headers: {} + } + super({ ...toolInput, accessToken: args.accessToken }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const url = `https://graph.microsoft.com/v1.0/me/calendars/${params.calendarId}` + + try { + const response = await this.makeGraphRequest(url, 'GET', undefined, params) + return response + } catch (error) { + return formatToolError(`Error getting calendar: ${error}`, params) + } + } +} + +class CreateCalendarTool extends BaseOutlookTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'create_calendar', + description: 'Create a new calendar in Microsoft Outlook', + schema: CreateCalendarSchema, + baseUrl: 'https://graph.microsoft.com/v1.0/me/calendars', + method: 'POST', + headers: {} + } + super({ ...toolInput, accessToken: args.accessToken }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + + try { + const calendarData = { + name: params.calendarName + } + + const url = 'https://graph.microsoft.com/v1.0/me/calendars' + const response = await this.makeGraphRequest(url, 'POST', calendarData, params) + return response + } catch (error) { + return formatToolError(`Error creating calendar: ${error}`, params) + } + } +} + +class UpdateCalendarTool extends BaseOutlookTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'update_calendar', + description: 'Update a calendar in Microsoft Outlook', + schema: UpdateCalendarSchema, + baseUrl: 'https://graph.microsoft.com/v1.0/me/calendars', + method: 'PATCH', + headers: {} + } + super({ ...toolInput, accessToken: args.accessToken }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, 
...this.defaultParams } + + try { + const calendarData = { + name: params.calendarName + } + + const url = `https://graph.microsoft.com/v1.0/me/calendars/${params.calendarId}` + const response = await this.makeGraphRequest(url, 'PATCH', calendarData, params) + return response + } catch (error) { + return formatToolError(`Error updating calendar: ${error}`, params) + } + } +} + +class DeleteCalendarTool extends BaseOutlookTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'delete_calendar', + description: 'Delete a calendar from Microsoft Outlook', + schema: DeleteCalendarSchema, + baseUrl: 'https://graph.microsoft.com/v1.0/me/calendars', + method: 'DELETE', + headers: {} + } + super({ ...toolInput, accessToken: args.accessToken }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const url = `https://graph.microsoft.com/v1.0/me/calendars/${params.calendarId}` + + try { + await this.makeGraphRequest(url, 'DELETE', undefined, params) + return `Calendar ${params.calendarId} deleted successfully` + } catch (error) { + return formatToolError(`Error deleting calendar: ${error}`, params) + } + } +} + +class ListEventsTool extends BaseOutlookTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'list_events', + description: 'List events from Microsoft Outlook calendar', + schema: ListEventsSchema, + baseUrl: 'https://graph.microsoft.com/v1.0/me/events', + method: 'GET', + headers: {} + } + super({ ...toolInput, accessToken: args.accessToken }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const queryParams = new URLSearchParams() + + if (params.maxResults) queryParams.append('$top', params.maxResults.toString()) + if (params.startDateTime) queryParams.append('$filter', `start/dateTime ge '${params.startDateTime}'`) + if 
(params.endDateTime) { + const existingFilter = queryParams.get('$filter') + const endFilter = `end/dateTime le '${params.endDateTime}'` + if (existingFilter) { + queryParams.set('$filter', `${existingFilter} and ${endFilter}`) + } else { + queryParams.append('$filter', endFilter) + } + } + + const baseUrl = params.calendarId + ? `https://graph.microsoft.com/v1.0/me/calendars/${params.calendarId}/events` + : 'https://graph.microsoft.com/v1.0/me/events' + + const url = `${baseUrl}?${queryParams.toString()}` + + try { + const response = await this.makeGraphRequest(url, 'GET', undefined, params) + return response + } catch (error) { + return formatToolError(`Error listing events: ${error}`, params) + } + } +} + +class GetEventTool extends BaseOutlookTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'get_event', + description: 'Get a specific event by ID from Microsoft Outlook', + schema: GetEventSchema, + baseUrl: 'https://graph.microsoft.com/v1.0/me/events', + method: 'GET', + headers: {} + } + super({ ...toolInput, accessToken: args.accessToken }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const url = `https://graph.microsoft.com/v1.0/me/events/${params.eventId}` + + try { + const response = await this.makeGraphRequest(url, 'GET', undefined, params) + return response + } catch (error) { + return formatToolError(`Error getting event: ${error}`, params) + } + } +} + +class CreateEventTool extends BaseOutlookTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'create_event', + description: 'Create a new event in Microsoft Outlook calendar', + schema: CreateEventSchema, + baseUrl: 'https://graph.microsoft.com/v1.0/me/events', + method: 'POST', + headers: {} + } + super({ ...toolInput, accessToken: args.accessToken }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise 
{ + const params = { ...arg, ...this.defaultParams } + + try { + const eventData = { + subject: params.subject, + body: { + contentType: 'HTML', + content: params.body || '' + }, + start: { + dateTime: params.startDateTime, + timeZone: params.timeZone || 'UTC' + }, + end: { + dateTime: params.endDateTime, + timeZone: params.timeZone || 'UTC' + }, + location: params.location + ? { + displayName: params.location + } + : undefined, + attendees: params.attendees ? this.parseEmailAddresses(params.attendees) : [] + } + + const url = 'https://graph.microsoft.com/v1.0/me/events' + const response = await this.makeGraphRequest(url, 'POST', eventData, params) + return response + } catch (error) { + return formatToolError(`Error creating event: ${error}`, params) + } + } +} + +class UpdateEventTool extends BaseOutlookTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'update_event', + description: 'Update an event in Microsoft Outlook calendar', + schema: UpdateEventSchema, + baseUrl: 'https://graph.microsoft.com/v1.0/me/events', + method: 'PATCH', + headers: {} + } + super({ ...toolInput, accessToken: args.accessToken }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + + try { + const eventData: any = {} + if (params.subject) eventData.subject = params.subject + + const url = `https://graph.microsoft.com/v1.0/me/events/${params.eventId}` + const response = await this.makeGraphRequest(url, 'PATCH', eventData, params) + return response + } catch (error) { + return formatToolError(`Error updating event: ${error}`, params) + } + } +} + +class DeleteEventTool extends BaseOutlookTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'delete_event', + description: 'Delete an event from Microsoft Outlook calendar', + schema: DeleteEventSchema, + baseUrl: 'https://graph.microsoft.com/v1.0/me/events', + method: 'DELETE', + 
headers: {} + } + super({ ...toolInput, accessToken: args.accessToken }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const url = `https://graph.microsoft.com/v1.0/me/events/${params.eventId}` + + try { + await this.makeGraphRequest(url, 'DELETE', undefined, params) + return `Event ${params.eventId} deleted successfully` + } catch (error) { + return formatToolError(`Error deleting event: ${error}`, params) + } + } +} + +// Message Tools +class ListMessagesTool extends BaseOutlookTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'list_messages', + description: 'List messages from Microsoft Outlook mailbox', + schema: ListMessagesSchema, + baseUrl: 'https://graph.microsoft.com/v1.0/me/messages', + method: 'GET', + headers: {} + } + super({ ...toolInput, accessToken: args.accessToken }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const queryParams = new URLSearchParams() + + if (params.maxResults) queryParams.append('$top', params.maxResults.toString()) + if (params.filter) queryParams.append('$filter', params.filter) + + const url = `https://graph.microsoft.com/v1.0/me/messages?${queryParams.toString()}` + + try { + const response = await this.makeGraphRequest(url, 'GET', undefined, params) + return response + } catch (error) { + return formatToolError(`Error listing messages: ${error}`, params) + } + } +} + +class GetMessageTool extends BaseOutlookTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'get_message', + description: 'Get a specific message by ID from Microsoft Outlook', + schema: GetMessageSchema, + baseUrl: 'https://graph.microsoft.com/v1.0/me/messages', + method: 'GET', + headers: {} + } + super({ ...toolInput, accessToken: args.accessToken }) + this.defaultParams = args.defaultParams || {} 
+ } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const url = `https://graph.microsoft.com/v1.0/me/messages/${params.messageId}` + + try { + const response = await this.makeGraphRequest(url, 'GET', undefined, params) + return response + } catch (error) { + return formatToolError(`Error getting message: ${error}`, params) + } + } +} + +class CreateDraftMessageTool extends BaseOutlookTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'create_draft_message', + description: 'Create a draft message in Microsoft Outlook', + schema: CreateDraftMessageSchema, + baseUrl: 'https://graph.microsoft.com/v1.0/me/messages', + method: 'POST', + headers: {} + } + super({ ...toolInput, accessToken: args.accessToken }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + + try { + const messageData = { + subject: params.subject || '', + body: { + contentType: 'HTML', + content: params.body || '' + }, + toRecipients: this.parseEmailAddresses(params.to), + ccRecipients: params.cc ? this.parseEmailAddresses(params.cc) : [], + bccRecipients: params.bcc ? 
this.parseEmailAddresses(params.bcc) : [] + } + + const url = 'https://graph.microsoft.com/v1.0/me/messages' + const response = await this.makeGraphRequest(url, 'POST', messageData, params) + return response + } catch (error) { + return formatToolError(`Error creating draft message: ${error}`, params) + } + } +} + +class SendMessageTool extends BaseOutlookTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'send_message', + description: 'Send a message via Microsoft Outlook', + schema: SendMessageSchema, + baseUrl: 'https://graph.microsoft.com/v1.0/me/sendMail', + method: 'POST', + headers: {} + } + super({ ...toolInput, accessToken: args.accessToken }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + + try { + const messageData = { + message: { + subject: params.subject || '', + body: { + contentType: 'HTML', + content: params.body || '' + }, + toRecipients: this.parseEmailAddresses(params.to) + }, + saveToSentItems: true + } + + const url = 'https://graph.microsoft.com/v1.0/me/sendMail' + await this.makeGraphRequest(url, 'POST', messageData, params) + return 'Message sent successfully' + } catch (error) { + return formatToolError(`Error sending message: ${error}`, params) + } + } +} + +class UpdateMessageTool extends BaseOutlookTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'update_message', + description: 'Update a message in Microsoft Outlook', + schema: UpdateMessageSchema, + baseUrl: 'https://graph.microsoft.com/v1.0/me/messages', + method: 'PATCH', + headers: {} + } + super({ ...toolInput, accessToken: args.accessToken }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + + try { + const messageData: any = {} + if (params.isRead !== undefined) messageData.isRead = params.isRead + + const url = 
`https://graph.microsoft.com/v1.0/me/messages/${params.messageId}` + const response = await this.makeGraphRequest(url, 'PATCH', messageData, params) + return response + } catch (error) { + return formatToolError(`Error updating message: ${error}`, params) + } + } +} + +class DeleteMessageTool extends BaseOutlookTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'delete_message', + description: 'Delete a message from Microsoft Outlook', + schema: DeleteMessageSchema, + baseUrl: 'https://graph.microsoft.com/v1.0/me/messages', + method: 'DELETE', + headers: {} + } + super({ ...toolInput, accessToken: args.accessToken }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const url = `https://graph.microsoft.com/v1.0/me/messages/${params.messageId}` + + try { + await this.makeGraphRequest(url, 'DELETE', undefined, params) + return `Message ${params.messageId} deleted successfully` + } catch (error) { + return formatToolError(`Error deleting message: ${error}`, params) + } + } +} + +class CopyMessageTool extends BaseOutlookTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'copy_message', + description: 'Copy a message to another folder in Microsoft Outlook', + schema: CopyMessageSchema, + baseUrl: 'https://graph.microsoft.com/v1.0/me/messages', + method: 'POST', + headers: {} + } + super({ ...toolInput, accessToken: args.accessToken }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + + try { + const copyData = { + destinationId: params.destinationFolderId + } + + const url = `https://graph.microsoft.com/v1.0/me/messages/${params.messageId}/copy` + const response = await this.makeGraphRequest(url, 'POST', copyData, params) + return response + } catch (error) { + return formatToolError(`Error copying message: ${error}`, 
params) + } + } +} + +class MoveMessageTool extends BaseOutlookTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'move_message', + description: 'Move a message to another folder in Microsoft Outlook', + schema: MoveMessageSchema, + baseUrl: 'https://graph.microsoft.com/v1.0/me/messages', + method: 'POST', + headers: {} + } + super({ ...toolInput, accessToken: args.accessToken }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + + try { + const moveData = { + destinationId: params.destinationFolderId + } + + const url = `https://graph.microsoft.com/v1.0/me/messages/${params.messageId}/move` + const response = await this.makeGraphRequest(url, 'POST', moveData, params) + return response + } catch (error) { + return formatToolError(`Error moving message: ${error}`, params) + } + } +} + +class ReplyMessageTool extends BaseOutlookTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'reply_message', + description: 'Reply to a message in Microsoft Outlook', + schema: ReplyMessageSchema, + baseUrl: 'https://graph.microsoft.com/v1.0/me/messages', + method: 'POST', + headers: {} + } + super({ ...toolInput, accessToken: args.accessToken }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + + try { + const replyData = { + comment: params.replyBody + } + + const url = `https://graph.microsoft.com/v1.0/me/messages/${params.messageId}/reply` + await this.makeGraphRequest(url, 'POST', replyData, params) + return 'Reply sent successfully' + } catch (error) { + return formatToolError(`Error replying to message: ${error}`, params) + } + } +} + +class ForwardMessageTool extends BaseOutlookTool { + defaultParams: any + + constructor(args: any) { + const toolInput = { + name: 'forward_message', + description: 'Forward a message in 
Microsoft Outlook', + schema: ForwardMessageSchema, + baseUrl: 'https://graph.microsoft.com/v1.0/me/messages', + method: 'POST', + headers: {} + } + super({ ...toolInput, accessToken: args.accessToken }) + this.defaultParams = args.defaultParams || {} + } + + async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + + try { + const forwardData = { + toRecipients: this.parseEmailAddresses(params.forwardTo), + comment: params.forwardComment || '' + } + + const url = `https://graph.microsoft.com/v1.0/me/messages/${params.messageId}/forward` + await this.makeGraphRequest(url, 'POST', forwardData, params) + return 'Message forwarded successfully' + } catch (error) { + return `Error forwarding message: ${error}` + } + } +} + +export const createOutlookTools = (args?: RequestParameters): DynamicStructuredTool[] => { + const tools: DynamicStructuredTool[] = [] + const actions = args?.actions || [] + const accessToken = args?.accessToken || '' + const defaultParams = args?.defaultParams || {} + + // Calendar tools + if (actions.includes('listCalendars')) { + const listTool = new ListCalendarsTool({ accessToken, defaultParams }) + tools.push(listTool) + } + + if (actions.includes('getCalendar')) { + const getTool = new GetCalendarTool({ accessToken, defaultParams }) + tools.push(getTool) + } + + if (actions.includes('createCalendar')) { + const createTool = new CreateCalendarTool({ accessToken, defaultParams }) + tools.push(createTool) + } + + if (actions.includes('updateCalendar')) { + const updateTool = new UpdateCalendarTool({ accessToken, defaultParams }) + tools.push(updateTool) + } + + if (actions.includes('deleteCalendar')) { + const deleteTool = new DeleteCalendarTool({ accessToken, defaultParams }) + tools.push(deleteTool) + } + + if (actions.includes('listEvents')) { + const listTool = new ListEventsTool({ accessToken, defaultParams }) + tools.push(listTool) + } + + if (actions.includes('getEvent')) { + const getTool = new GetEventTool({ 
accessToken, defaultParams }) + tools.push(getTool) + } + + if (actions.includes('createEvent')) { + const createTool = new CreateEventTool({ accessToken, defaultParams }) + tools.push(createTool) + } + + if (actions.includes('updateEvent')) { + const updateTool = new UpdateEventTool({ accessToken, defaultParams }) + tools.push(updateTool) + } + + if (actions.includes('deleteEvent')) { + const deleteTool = new DeleteEventTool({ accessToken, defaultParams }) + tools.push(deleteTool) + } + + // Message tools + if (actions.includes('listMessages')) { + const listTool = new ListMessagesTool({ accessToken, defaultParams }) + tools.push(listTool) + } + + if (actions.includes('getMessage')) { + const getTool = new GetMessageTool({ accessToken, defaultParams }) + tools.push(getTool) + } + + if (actions.includes('createDraftMessage')) { + const createTool = new CreateDraftMessageTool({ accessToken, defaultParams }) + tools.push(createTool) + } + + if (actions.includes('sendMessage')) { + const sendTool = new SendMessageTool({ accessToken, defaultParams }) + tools.push(sendTool) + } + + if (actions.includes('updateMessage')) { + const updateTool = new UpdateMessageTool({ accessToken, defaultParams }) + tools.push(updateTool) + } + + if (actions.includes('deleteMessage')) { + const deleteTool = new DeleteMessageTool({ accessToken, defaultParams }) + tools.push(deleteTool) + } + + if (actions.includes('copyMessage')) { + const copyTool = new CopyMessageTool({ accessToken, defaultParams }) + tools.push(copyTool) + } + + if (actions.includes('moveMessage')) { + const moveTool = new MoveMessageTool({ accessToken, defaultParams }) + tools.push(moveTool) + } + + if (actions.includes('replyMessage')) { + const replyTool = new ReplyMessageTool({ accessToken, defaultParams }) + tools.push(replyTool) + } + + if (actions.includes('forwardMessage')) { + const forwardTool = new ForwardMessageTool({ accessToken, defaultParams }) + tools.push(forwardTool) + } + + return tools +} diff --git 
a/packages/components/nodes/tools/MicrosoftOutlook/outlook.svg b/packages/components/nodes/tools/MicrosoftOutlook/outlook.svg new file mode 100644 index 000000000..134a2ee92 --- /dev/null +++ b/packages/components/nodes/tools/MicrosoftOutlook/outlook.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/packages/components/nodes/tools/MicrosoftTeams/MicrosoftTeams.ts b/packages/components/nodes/tools/MicrosoftTeams/MicrosoftTeams.ts new file mode 100644 index 000000000..d9391c504 --- /dev/null +++ b/packages/components/nodes/tools/MicrosoftTeams/MicrosoftTeams.ts @@ -0,0 +1,1012 @@ +import { INode, INodeData, INodeParams } from '../../../src/Interface' +import { convertMultiOptionsToStringArray, getCredentialData, getCredentialParam, refreshOAuth2Token } from '../../../src/utils' +import { createTeamsTools } from './core' + +class MicrosoftTeams_Tools implements INode { + label: string + name: string + version: number + description: string + type: string + icon: string + category: string + baseClasses: string[] + inputs: INodeParams[] + credential: INodeParams + + constructor() { + this.label = 'Microsoft Teams' + this.name = 'microsoftTeams' + this.version = 1.0 + this.type = 'MicrosoftTeams' + this.icon = 'teams.svg' + this.category = 'Tools' + this.description = 'Perform Microsoft Teams operations for channels, chats, and chat messages' + this.baseClasses = [this.type, 'Tool'] + this.credential = { + label: 'Connect Credential', + name: 'credential', + type: 'credential', + credentialNames: ['microsoftTeamsOAuth2'] + } + this.inputs = [ + { + label: 'Type', + name: 'teamsType', + type: 'options', + options: [ + { + label: 'Channel', + name: 'channel' + }, + { + label: 'Chat', + name: 'chat' + }, + { + label: 'Chat Message', + name: 'chatMessage' + } + ] + }, + // Channel Actions + { + label: 'Channel Actions', + name: 'channelActions', + type: 'multiOptions', + options: [ + { + label: 'List Channels', + name: 'listChannels' + }, + { + label: 'Get 
Channel', + name: 'getChannel' + }, + { + label: 'Create Channel', + name: 'createChannel' + }, + { + label: 'Update Channel', + name: 'updateChannel' + }, + { + label: 'Delete Channel', + name: 'deleteChannel' + }, + { + label: 'Archive Channel', + name: 'archiveChannel' + }, + { + label: 'Unarchive Channel', + name: 'unarchiveChannel' + }, + { + label: 'List Channel Members', + name: 'listChannelMembers' + }, + { + label: 'Add Channel Member', + name: 'addChannelMember' + }, + { + label: 'Remove Channel Member', + name: 'removeChannelMember' + } + ], + show: { + teamsType: ['channel'] + } + }, + // Chat Actions + { + label: 'Chat Actions', + name: 'chatActions', + type: 'multiOptions', + options: [ + { + label: 'List Chats', + name: 'listChats' + }, + { + label: 'Get Chat', + name: 'getChat' + }, + { + label: 'Create Chat', + name: 'createChat' + }, + { + label: 'Update Chat', + name: 'updateChat' + }, + { + label: 'Delete Chat', + name: 'deleteChat' + }, + { + label: 'List Chat Members', + name: 'listChatMembers' + }, + { + label: 'Add Chat Member', + name: 'addChatMember' + }, + { + label: 'Remove Chat Member', + name: 'removeChatMember' + }, + { + label: 'Pin Message', + name: 'pinMessage' + }, + { + label: 'Unpin Message', + name: 'unpinMessage' + } + ], + show: { + teamsType: ['chat'] + } + }, + // Chat Message Actions + { + label: 'Chat Message Actions', + name: 'chatMessageActions', + type: 'multiOptions', + options: [ + { + label: 'List Messages', + name: 'listMessages' + }, + { + label: 'Get Message', + name: 'getMessage' + }, + { + label: 'Send Message', + name: 'sendMessage' + }, + { + label: 'Update Message', + name: 'updateMessage' + }, + { + label: 'Delete Message', + name: 'deleteMessage' + }, + { + label: 'Reply to Message', + name: 'replyToMessage' + }, + { + label: 'Set Reaction', + name: 'setReaction' + }, + { + label: 'Unset Reaction', + name: 'unsetReaction' + }, + { + label: 'Get All Messages', + name: 'getAllMessages' + } + ], + show: { + 
teamsType: ['chatMessage'] + } + }, + + // CHANNEL PARAMETERS + // List Channels Parameters + { + label: 'Team ID [List Channels]', + name: 'teamIdListChannels', + type: 'string', + description: 'ID of the team to list channels from', + show: { + teamsType: ['channel'], + channelActions: ['listChannels'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Max Results [List Channels]', + name: 'maxResultsListChannels', + type: 'number', + description: 'Maximum number of channels to return', + default: 50, + show: { + teamsType: ['channel'], + channelActions: ['listChannels'] + }, + additionalParams: true, + optional: true + }, + + // Get Channel Parameters + { + label: 'Team ID [Get Channel]', + name: 'teamIdGetChannel', + type: 'string', + description: 'ID of the team that contains the channel', + show: { + teamsType: ['channel'], + channelActions: ['getChannel'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Channel ID [Get Channel]', + name: 'channelIdGetChannel', + type: 'string', + description: 'ID of the channel to retrieve', + show: { + teamsType: ['channel'], + channelActions: ['getChannel'] + }, + additionalParams: true, + optional: true + }, + + // Create Channel Parameters + { + label: 'Team ID [Create Channel]', + name: 'teamIdCreateChannel', + type: 'string', + description: 'ID of the team to create the channel in', + show: { + teamsType: ['channel'], + channelActions: ['createChannel'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Display Name [Create Channel]', + name: 'displayNameCreateChannel', + type: 'string', + description: 'Display name of the channel', + placeholder: 'My New Channel', + show: { + teamsType: ['channel'], + channelActions: ['createChannel'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Description [Create Channel]', + name: 'descriptionCreateChannel', + type: 'string', + description: 'Description of the channel', + placeholder: 'Channel description', + 
rows: 2, + show: { + teamsType: ['channel'], + channelActions: ['createChannel'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Membership Type [Create Channel]', + name: 'membershipTypeCreateChannel', + type: 'options', + options: [ + { label: 'Standard', name: 'standard' }, + { label: 'Private', name: 'private' }, + { label: 'Shared', name: 'shared' } + ], + default: 'standard', + description: 'Type of channel membership', + show: { + teamsType: ['channel'], + channelActions: ['createChannel'] + }, + additionalParams: true, + optional: true + }, + + // Update Channel Parameters + { + label: 'Team ID [Update Channel]', + name: 'teamIdUpdateChannel', + type: 'string', + description: 'ID of the team that contains the channel', + show: { + teamsType: ['channel'], + channelActions: ['updateChannel'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Channel ID [Update Channel]', + name: 'channelIdUpdateChannel', + type: 'string', + description: 'ID of the channel to update', + show: { + teamsType: ['channel'], + channelActions: ['updateChannel'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Display Name [Update Channel]', + name: 'displayNameUpdateChannel', + type: 'string', + description: 'New display name of the channel', + show: { + teamsType: ['channel'], + channelActions: ['updateChannel'] + }, + additionalParams: true, + optional: true + }, + + // Delete/Archive Channel Parameters + { + label: 'Team ID [Delete/Archive Channel]', + name: 'teamIdDeleteChannel', + type: 'string', + description: 'ID of the team that contains the channel', + show: { + teamsType: ['channel'], + channelActions: ['deleteChannel', 'archiveChannel', 'unarchiveChannel'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Channel ID [Delete/Archive Channel]', + name: 'channelIdDeleteChannel', + type: 'string', + description: 'ID of the channel to delete or archive', + show: { + teamsType: ['channel'], + channelActions: 
['deleteChannel', 'archiveChannel', 'unarchiveChannel'] + }, + additionalParams: true, + optional: true + }, + + // Channel Members Parameters + { + label: 'Team ID [Channel Members]', + name: 'teamIdChannelMembers', + type: 'string', + description: 'ID of the team that contains the channel', + show: { + teamsType: ['channel'], + channelActions: ['listChannelMembers', 'addChannelMember', 'removeChannelMember'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Channel ID [Channel Members]', + name: 'channelIdChannelMembers', + type: 'string', + description: 'ID of the channel', + show: { + teamsType: ['channel'], + channelActions: ['listChannelMembers', 'addChannelMember', 'removeChannelMember'] + }, + additionalParams: true, + optional: true + }, + { + label: 'User ID [Add/Remove Channel Member]', + name: 'userIdChannelMember', + type: 'string', + description: 'ID of the user to add or remove', + show: { + teamsType: ['channel'], + channelActions: ['addChannelMember', 'removeChannelMember'] + }, + additionalParams: true, + optional: true + }, + + // CHAT PARAMETERS + // List Chats Parameters + { + label: 'Max Results [List Chats]', + name: 'maxResultsListChats', + type: 'number', + description: 'Maximum number of chats to return', + default: 50, + show: { + teamsType: ['chat'], + chatActions: ['listChats'] + }, + additionalParams: true, + optional: true + }, + + // Get Chat Parameters + { + label: 'Chat ID [Get Chat]', + name: 'chatIdGetChat', + type: 'string', + description: 'ID of the chat to retrieve', + show: { + teamsType: ['chat'], + chatActions: ['getChat'] + }, + additionalParams: true, + optional: true + }, + + // Create Chat Parameters + { + label: 'Chat Type [Create Chat]', + name: 'chatTypeCreateChat', + type: 'options', + options: [ + { label: 'One on One', name: 'oneOnOne' }, + { label: 'Group', name: 'group' } + ], + default: 'group', + description: 'Type of chat to create', + show: { + teamsType: ['chat'], + chatActions: 
['createChat'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Topic [Create Chat]', + name: 'topicCreateChat', + type: 'string', + description: 'Topic/subject of the chat (for group chats)', + placeholder: 'Chat topic', + show: { + teamsType: ['chat'], + chatActions: ['createChat'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Members [Create Chat]', + name: 'membersCreateChat', + type: 'string', + description: 'Comma-separated list of user IDs to add to the chat', + placeholder: 'user1@example.com,user2@example.com', + show: { + teamsType: ['chat'], + chatActions: ['createChat'] + }, + additionalParams: true, + optional: true + }, + + // Update Chat Parameters + { + label: 'Chat ID [Update Chat]', + name: 'chatIdUpdateChat', + type: 'string', + description: 'ID of the chat to update', + show: { + teamsType: ['chat'], + chatActions: ['updateChat'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Topic [Update Chat]', + name: 'topicUpdateChat', + type: 'string', + description: 'New topic/subject of the chat', + show: { + teamsType: ['chat'], + chatActions: ['updateChat'] + }, + additionalParams: true, + optional: true + }, + + // Delete Chat Parameters + { + label: 'Chat ID [Delete Chat]', + name: 'chatIdDeleteChat', + type: 'string', + description: 'ID of the chat to delete', + show: { + teamsType: ['chat'], + chatActions: ['deleteChat'] + }, + additionalParams: true, + optional: true + }, + + // Chat Members Parameters + { + label: 'Chat ID [Chat Members]', + name: 'chatIdChatMembers', + type: 'string', + description: 'ID of the chat', + show: { + teamsType: ['chat'], + chatActions: ['listChatMembers', 'addChatMember', 'removeChatMember'] + }, + additionalParams: true, + optional: true + }, + { + label: 'User ID [Add/Remove Chat Member]', + name: 'userIdChatMember', + type: 'string', + description: 'ID of the user to add or remove', + show: { + teamsType: ['chat'], + chatActions: ['addChatMember', 
'removeChatMember'] + }, + additionalParams: true, + optional: true + }, + + // Pin/Unpin Message Parameters + { + label: 'Chat ID [Pin/Unpin Message]', + name: 'chatIdPinMessage', + type: 'string', + description: 'ID of the chat', + show: { + teamsType: ['chat'], + chatActions: ['pinMessage', 'unpinMessage'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Message ID [Pin/Unpin Message]', + name: 'messageIdPinMessage', + type: 'string', + description: 'ID of the message to pin or unpin', + show: { + teamsType: ['chat'], + chatActions: ['pinMessage', 'unpinMessage'] + }, + additionalParams: true, + optional: true + }, + + // CHAT MESSAGE PARAMETERS + // List Messages Parameters + { + label: 'Chat/Channel ID [List Messages]', + name: 'chatChannelIdListMessages', + type: 'string', + description: 'ID of the chat or channel to list messages from', + show: { + teamsType: ['chatMessage'], + chatMessageActions: ['listMessages'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Team ID [List Messages - Channel Only]', + name: 'teamIdListMessages', + type: 'string', + description: 'ID of the team (required for channel messages)', + show: { + teamsType: ['chatMessage'], + chatMessageActions: ['listMessages'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Max Results [List Messages]', + name: 'maxResultsListMessages', + type: 'number', + description: 'Maximum number of messages to return', + default: 50, + show: { + teamsType: ['chatMessage'], + chatMessageActions: ['listMessages'] + }, + additionalParams: true, + optional: true + }, + + // Get Message Parameters + { + label: 'Chat/Channel ID [Get Message]', + name: 'chatChannelIdGetMessage', + type: 'string', + description: 'ID of the chat or channel', + show: { + teamsType: ['chatMessage'], + chatMessageActions: ['getMessage'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Team ID [Get Message - Channel Only]', + name: 'teamIdGetMessage', + type: 
'string', + description: 'ID of the team (required for channel messages)', + show: { + teamsType: ['chatMessage'], + chatMessageActions: ['getMessage'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Message ID [Get Message]', + name: 'messageIdGetMessage', + type: 'string', + description: 'ID of the message to retrieve', + show: { + teamsType: ['chatMessage'], + chatMessageActions: ['getMessage'] + }, + additionalParams: true, + optional: true + }, + + // Send Message Parameters + { + label: 'Chat/Channel ID [Send Message]', + name: 'chatChannelIdSendMessage', + type: 'string', + description: 'ID of the chat or channel to send message to', + show: { + teamsType: ['chatMessage'], + chatMessageActions: ['sendMessage'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Team ID [Send Message - Channel Only]', + name: 'teamIdSendMessage', + type: 'string', + description: 'ID of the team (required for channel messages)', + show: { + teamsType: ['chatMessage'], + chatMessageActions: ['sendMessage'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Message Body [Send Message]', + name: 'messageBodySendMessage', + type: 'string', + description: 'Content of the message', + placeholder: 'Hello, this is a message!', + rows: 4, + show: { + teamsType: ['chatMessage'], + chatMessageActions: ['sendMessage'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Content Type [Send Message]', + name: 'contentTypeSendMessage', + type: 'options', + options: [ + { label: 'Text', name: 'text' }, + { label: 'HTML', name: 'html' } + ], + default: 'text', + description: 'Content type of the message', + show: { + teamsType: ['chatMessage'], + chatMessageActions: ['sendMessage'] + }, + additionalParams: true, + optional: true + }, + + // Update Message Parameters + { + label: 'Chat/Channel ID [Update Message]', + name: 'chatChannelIdUpdateMessage', + type: 'string', + description: 'ID of the chat or channel', + show: { + 
teamsType: ['chatMessage'], + chatMessageActions: ['updateMessage'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Team ID [Update Message - Channel Only]', + name: 'teamIdUpdateMessage', + type: 'string', + description: 'ID of the team (required for channel messages)', + show: { + teamsType: ['chatMessage'], + chatMessageActions: ['updateMessage'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Message ID [Update Message]', + name: 'messageIdUpdateMessage', + type: 'string', + description: 'ID of the message to update', + show: { + teamsType: ['chatMessage'], + chatMessageActions: ['updateMessage'] + }, + additionalParams: true, + optional: true + }, + + // Delete Message Parameters + { + label: 'Chat/Channel ID [Delete Message]', + name: 'chatChannelIdDeleteMessage', + type: 'string', + description: 'ID of the chat or channel', + show: { + teamsType: ['chatMessage'], + chatMessageActions: ['deleteMessage'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Team ID [Delete Message - Channel Only]', + name: 'teamIdDeleteMessage', + type: 'string', + description: 'ID of the team (required for channel messages)', + show: { + teamsType: ['chatMessage'], + chatMessageActions: ['deleteMessage'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Message ID [Delete Message]', + name: 'messageIdDeleteMessage', + type: 'string', + description: 'ID of the message to delete', + show: { + teamsType: ['chatMessage'], + chatMessageActions: ['deleteMessage'] + }, + additionalParams: true, + optional: true + }, + + // Reply to Message Parameters + { + label: 'Chat/Channel ID [Reply to Message]', + name: 'chatChannelIdReplyMessage', + type: 'string', + description: 'ID of the chat or channel', + show: { + teamsType: ['chatMessage'], + chatMessageActions: ['replyToMessage'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Team ID [Reply to Message - Channel Only]', + name: 'teamIdReplyMessage', 
+ type: 'string', + description: 'ID of the team (required for channel messages)', + show: { + teamsType: ['chatMessage'], + chatMessageActions: ['replyToMessage'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Message ID [Reply to Message]', + name: 'messageIdReplyMessage', + type: 'string', + description: 'ID of the message to reply to', + show: { + teamsType: ['chatMessage'], + chatMessageActions: ['replyToMessage'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Reply Body [Reply to Message]', + name: 'replyBodyReplyMessage', + type: 'string', + description: 'Content of the reply', + placeholder: 'This is my reply', + rows: 3, + show: { + teamsType: ['chatMessage'], + chatMessageActions: ['replyToMessage'] + }, + additionalParams: true, + optional: true + }, + + // Set/Unset Reaction Parameters + { + label: 'Chat/Channel ID [Set/Unset Reaction]', + name: 'chatChannelIdReaction', + type: 'string', + description: 'ID of the chat or channel', + show: { + teamsType: ['chatMessage'], + chatMessageActions: ['setReaction', 'unsetReaction'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Team ID [Set/Unset Reaction - Channel Only]', + name: 'teamIdReaction', + type: 'string', + description: 'ID of the team (required for channel messages)', + show: { + teamsType: ['chatMessage'], + chatMessageActions: ['setReaction', 'unsetReaction'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Message ID [Set/Unset Reaction]', + name: 'messageIdReaction', + type: 'string', + description: 'ID of the message to react to', + show: { + teamsType: ['chatMessage'], + chatMessageActions: ['setReaction', 'unsetReaction'] + }, + additionalParams: true, + optional: true + }, + { + label: 'Reaction Type [Set Reaction]', + name: 'reactionTypeSetReaction', + type: 'options', + options: [ + { label: 'Like', name: 'like' }, + { label: 'Heart', name: 'heart' }, + { label: 'Laugh', name: 'laugh' }, + { label: 'Surprised', 
name: 'surprised' }, + { label: 'Sad', name: 'sad' }, + { label: 'Angry', name: 'angry' } + ], + default: 'like', + description: 'Type of reaction to set', + show: { + teamsType: ['chatMessage'], + chatMessageActions: ['setReaction'] + }, + additionalParams: true, + optional: true + } + ] + } + + async init(nodeData: INodeData, _: string, options: any): Promise { + const teamsType = nodeData.inputs?.teamsType as string + const channelActions = nodeData.inputs?.channelActions as string + const chatActions = nodeData.inputs?.chatActions as string + const chatMessageActions = nodeData.inputs?.chatMessageActions as string + + let actions: string[] = [] + if (teamsType === 'channel') { + actions = convertMultiOptionsToStringArray(channelActions) + } else if (teamsType === 'chat') { + actions = convertMultiOptionsToStringArray(chatActions) + } else if (teamsType === 'chatMessage') { + actions = convertMultiOptionsToStringArray(chatMessageActions) + } + + let credentialData = await getCredentialData(nodeData.credential ?? '', options) + credentialData = await refreshOAuth2Token(nodeData.credential ?? 
'', credentialData, options) + const accessToken = getCredentialParam('access_token', credentialData, nodeData) + + if (!accessToken) { + throw new Error('No access token found in credential') + } + + const defaultParams = this.transformNodeInputsToToolArgs(nodeData) + + const teamsTools = createTeamsTools({ + accessToken, + actions, + defaultParams, + type: teamsType + }) + + return teamsTools + } + + transformNodeInputsToToolArgs(nodeData: INodeData): Record { + // Collect default parameters from inputs + const defaultParams: Record = {} + + // Channel parameters + if (nodeData.inputs?.teamIdListChannels) defaultParams.teamIdListChannels = nodeData.inputs.teamIdListChannels + if (nodeData.inputs?.maxResultsListChannels) defaultParams.maxResultsListChannels = nodeData.inputs.maxResultsListChannels + if (nodeData.inputs?.teamIdGetChannel) defaultParams.teamIdGetChannel = nodeData.inputs.teamIdGetChannel + if (nodeData.inputs?.channelIdGetChannel) defaultParams.channelIdGetChannel = nodeData.inputs.channelIdGetChannel + if (nodeData.inputs?.teamIdCreateChannel) defaultParams.teamIdCreateChannel = nodeData.inputs.teamIdCreateChannel + if (nodeData.inputs?.displayNameCreateChannel) defaultParams.displayNameCreateChannel = nodeData.inputs.displayNameCreateChannel + if (nodeData.inputs?.descriptionCreateChannel) defaultParams.descriptionCreateChannel = nodeData.inputs.descriptionCreateChannel + if (nodeData.inputs?.membershipTypeCreateChannel) + defaultParams.membershipTypeCreateChannel = nodeData.inputs.membershipTypeCreateChannel + if (nodeData.inputs?.teamIdUpdateChannel) defaultParams.teamIdUpdateChannel = nodeData.inputs.teamIdUpdateChannel + if (nodeData.inputs?.channelIdUpdateChannel) defaultParams.channelIdUpdateChannel = nodeData.inputs.channelIdUpdateChannel + if (nodeData.inputs?.displayNameUpdateChannel) defaultParams.displayNameUpdateChannel = nodeData.inputs.displayNameUpdateChannel + if (nodeData.inputs?.teamIdDeleteChannel) 
defaultParams.teamIdDeleteChannel = nodeData.inputs.teamIdDeleteChannel + if (nodeData.inputs?.channelIdDeleteChannel) defaultParams.channelIdDeleteChannel = nodeData.inputs.channelIdDeleteChannel + if (nodeData.inputs?.teamIdChannelMembers) defaultParams.teamIdChannelMembers = nodeData.inputs.teamIdChannelMembers + if (nodeData.inputs?.channelIdChannelMembers) defaultParams.channelIdChannelMembers = nodeData.inputs.channelIdChannelMembers + if (nodeData.inputs?.userIdChannelMember) defaultParams.userIdChannelMember = nodeData.inputs.userIdChannelMember + + // Chat parameters + if (nodeData.inputs?.maxResultsListChats) defaultParams.maxResultsListChats = nodeData.inputs.maxResultsListChats + if (nodeData.inputs?.chatIdGetChat) defaultParams.chatIdGetChat = nodeData.inputs.chatIdGetChat + if (nodeData.inputs?.chatTypeCreateChat) defaultParams.chatTypeCreateChat = nodeData.inputs.chatTypeCreateChat + if (nodeData.inputs?.topicCreateChat) defaultParams.topicCreateChat = nodeData.inputs.topicCreateChat + if (nodeData.inputs?.membersCreateChat) defaultParams.membersCreateChat = nodeData.inputs.membersCreateChat + if (nodeData.inputs?.chatIdUpdateChat) defaultParams.chatIdUpdateChat = nodeData.inputs.chatIdUpdateChat + if (nodeData.inputs?.topicUpdateChat) defaultParams.topicUpdateChat = nodeData.inputs.topicUpdateChat + if (nodeData.inputs?.chatIdDeleteChat) defaultParams.chatIdDeleteChat = nodeData.inputs.chatIdDeleteChat + if (nodeData.inputs?.chatIdChatMembers) defaultParams.chatIdChatMembers = nodeData.inputs.chatIdChatMembers + if (nodeData.inputs?.userIdChatMember) defaultParams.userIdChatMember = nodeData.inputs.userIdChatMember + if (nodeData.inputs?.chatIdPinMessage) defaultParams.chatIdPinMessage = nodeData.inputs.chatIdPinMessage + if (nodeData.inputs?.messageIdPinMessage) defaultParams.messageIdPinMessage = nodeData.inputs.messageIdPinMessage + + // Chat Message parameters + if (nodeData.inputs?.chatChannelIdListMessages) 
defaultParams.chatChannelIdListMessages = nodeData.inputs.chatChannelIdListMessages + if (nodeData.inputs?.teamIdListMessages) defaultParams.teamIdListMessages = nodeData.inputs.teamIdListMessages + if (nodeData.inputs?.maxResultsListMessages) defaultParams.maxResultsListMessages = nodeData.inputs.maxResultsListMessages + if (nodeData.inputs?.chatChannelIdGetMessage) defaultParams.chatChannelIdGetMessage = nodeData.inputs.chatChannelIdGetMessage + if (nodeData.inputs?.teamIdGetMessage) defaultParams.teamIdGetMessage = nodeData.inputs.teamIdGetMessage + if (nodeData.inputs?.messageIdGetMessage) defaultParams.messageIdGetMessage = nodeData.inputs.messageIdGetMessage + if (nodeData.inputs?.chatChannelIdSendMessage) defaultParams.chatChannelIdSendMessage = nodeData.inputs.chatChannelIdSendMessage + if (nodeData.inputs?.teamIdSendMessage) defaultParams.teamIdSendMessage = nodeData.inputs.teamIdSendMessage + if (nodeData.inputs?.messageBodySendMessage) defaultParams.messageBodySendMessage = nodeData.inputs.messageBodySendMessage + if (nodeData.inputs?.contentTypeSendMessage) defaultParams.contentTypeSendMessage = nodeData.inputs.contentTypeSendMessage + if (nodeData.inputs?.chatChannelIdUpdateMessage) + defaultParams.chatChannelIdUpdateMessage = nodeData.inputs.chatChannelIdUpdateMessage + if (nodeData.inputs?.teamIdUpdateMessage) defaultParams.teamIdUpdateMessage = nodeData.inputs.teamIdUpdateMessage + if (nodeData.inputs?.messageIdUpdateMessage) defaultParams.messageIdUpdateMessage = nodeData.inputs.messageIdUpdateMessage + if (nodeData.inputs?.chatChannelIdDeleteMessage) + defaultParams.chatChannelIdDeleteMessage = nodeData.inputs.chatChannelIdDeleteMessage + if (nodeData.inputs?.teamIdDeleteMessage) defaultParams.teamIdDeleteMessage = nodeData.inputs.teamIdDeleteMessage + if (nodeData.inputs?.messageIdDeleteMessage) defaultParams.messageIdDeleteMessage = nodeData.inputs.messageIdDeleteMessage + if (nodeData.inputs?.chatChannelIdReplyMessage) 
defaultParams.chatChannelIdReplyMessage = nodeData.inputs.chatChannelIdReplyMessage + if (nodeData.inputs?.teamIdReplyMessage) defaultParams.teamIdReplyMessage = nodeData.inputs.teamIdReplyMessage + if (nodeData.inputs?.messageIdReplyMessage) defaultParams.messageIdReplyMessage = nodeData.inputs.messageIdReplyMessage + if (nodeData.inputs?.replyBodyReplyMessage) defaultParams.replyBodyReplyMessage = nodeData.inputs.replyBodyReplyMessage + if (nodeData.inputs?.chatChannelIdReaction) defaultParams.chatChannelIdReaction = nodeData.inputs.chatChannelIdReaction + if (nodeData.inputs?.teamIdReaction) defaultParams.teamIdReaction = nodeData.inputs.teamIdReaction + if (nodeData.inputs?.messageIdReaction) defaultParams.messageIdReaction = nodeData.inputs.messageIdReaction + if (nodeData.inputs?.reactionTypeSetReaction) defaultParams.reactionTypeSetReaction = nodeData.inputs.reactionTypeSetReaction + + return defaultParams + } +} + +module.exports = { nodeClass: MicrosoftTeams_Tools } diff --git a/packages/components/nodes/tools/MicrosoftTeams/core.ts b/packages/components/nodes/tools/MicrosoftTeams/core.ts new file mode 100644 index 000000000..77c6feaf9 --- /dev/null +++ b/packages/components/nodes/tools/MicrosoftTeams/core.ts @@ -0,0 +1,1669 @@ +import { z } from 'zod' +import { CallbackManagerForToolRun } from '@langchain/core/callbacks/manager' +import { DynamicStructuredTool, DynamicStructuredToolInput } from '../OpenAPIToolkit/core' +import { TOOL_ARGS_PREFIX } from '../../../src/agents' + +interface TeamsToolOptions { + accessToken: string + actions: string[] + defaultParams: any + type: string +} + +const BASE_URL = 'https://graph.microsoft.com/v1.0' + +// Helper function to make Graph API requests +async function makeGraphRequest( + endpoint: string, + method: 'GET' | 'POST' | 'PATCH' | 'DELETE' = 'GET', + body?: any, + accessToken?: string +): Promise { + const headers: Record = { + Authorization: `Bearer ${accessToken}`, + 'Content-Type': 'application/json' + } + 
+ const config: RequestInit = { + method, + headers + } + + if (body && (method === 'POST' || method === 'PATCH')) { + config.body = JSON.stringify(body) + } + + try { + const response = await fetch(`${BASE_URL}${endpoint}`, config) + + if (!response.ok) { + const errorText = await response.text() + throw new Error(`Microsoft Graph API error: ${response.status} ${response.statusText} - ${errorText}`) + } + + // Handle empty responses for DELETE operations + if (method === 'DELETE' || response.status === 204) { + return { success: true, message: 'Operation completed successfully' } + } + + return await response.json() + } catch (error) { + throw new Error(`Microsoft Graph request failed: ${error instanceof Error ? error.message : 'Unknown error'}`) + } +} + +// Base Teams Tool class +abstract class BaseTeamsTool extends DynamicStructuredTool { + accessToken = '' + protected defaultParams: any + + constructor(args: DynamicStructuredToolInput & { accessToken?: string; defaultParams?: any }) { + super(args) + this.accessToken = args.accessToken ?? 
'' + this.defaultParams = args.defaultParams || {} + } + + protected async makeTeamsRequest(endpoint: string, method: string = 'GET', body?: any) { + return await makeGraphRequest(endpoint, method as any, body, this.accessToken) + } + + protected formatResponse(data: any, params: any): string { + return JSON.stringify(data) + TOOL_ARGS_PREFIX + JSON.stringify(params) + } + + // Abstract method that must be implemented by subclasses + protected abstract _call(arg: any, runManager?: CallbackManagerForToolRun, parentConfig?: any): Promise +} + +// CHANNEL TOOLS + +class ListChannelsTool extends BaseTeamsTool { + constructor(args: { accessToken?: string; defaultParams?: any }) { + const toolInput: DynamicStructuredToolInput = { + name: 'list_channels', + description: 'List all channels in a team', + schema: z.object({ + teamId: z.string().describe('ID of the team to list channels from'), + maxResults: z.number().optional().default(50).describe('Maximum number of channels to return') + }), + baseUrl: BASE_URL, + method: 'GET', + headers: {} + } + + super({ ...toolInput, accessToken: args.accessToken, defaultParams: args.defaultParams }) + } + + protected async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const { teamId, maxResults = 50 } = params + + if (!teamId) { + throw new Error('Team ID is required to list channels') + } + + try { + const endpoint = `/teams/${teamId}/channels` + const result = await this.makeTeamsRequest(endpoint) + + // Filter results to maxResults on client side since $top is not supported + const channels = result.value || [] + const limitedChannels = channels.slice(0, maxResults) + + const responseData = { + success: true, + channels: limitedChannels, + count: limitedChannels.length, + total: channels.length + } + + return this.formatResponse(responseData, params) + } catch (error) { + return this.formatResponse(`Error listing channels: ${error}`, params) + } + } +} + +class GetChannelTool extends 
BaseTeamsTool { + constructor(args: { accessToken?: string; defaultParams?: any }) { + const toolInput: DynamicStructuredToolInput = { + name: 'get_channel', + description: 'Get details of a specific channel', + schema: z.object({ + teamId: z.string().describe('ID of the team that contains the channel'), + channelId: z.string().describe('ID of the channel to retrieve') + }), + baseUrl: BASE_URL, + method: 'GET', + headers: {} + } + + super({ ...toolInput, accessToken: args.accessToken, defaultParams: args.defaultParams }) + } + + protected async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const { teamId, channelId } = params + + if (!teamId || !channelId) { + throw new Error('Both Team ID and Channel ID are required') + } + + try { + const endpoint = `/teams/${teamId}/channels/${channelId}` + const result = await this.makeTeamsRequest(endpoint) + + return this.formatResponse( + { + success: true, + channel: result + }, + params + ) + } catch (error) { + return this.formatResponse(`Error getting channel: ${error}`, params) + } + } +} + +class CreateChannelTool extends BaseTeamsTool { + constructor(args: { accessToken?: string; defaultParams?: any }) { + const toolInput: DynamicStructuredToolInput = { + name: 'create_channel', + description: 'Create a new channel in a team', + schema: z.object({ + teamId: z.string().describe('ID of the team to create the channel in'), + displayName: z.string().describe('Display name of the channel'), + description: z.string().optional().describe('Description of the channel'), + membershipType: z + .enum(['standard', 'private', 'shared']) + .optional() + .default('standard') + .describe('Type of channel membership') + }), + baseUrl: BASE_URL, + method: 'POST', + headers: {} + } + + super({ ...toolInput, accessToken: args.accessToken, defaultParams: args.defaultParams }) + } + + protected async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const { teamId, 
displayName, description, membershipType = 'standard' } = params + + if (!teamId || !displayName) { + throw new Error('Team ID and Display Name are required to create a channel') + } + + try { + const body = { + displayName, + membershipType, + ...(description && { description }) + } + + const endpoint = `/teams/${teamId}/channels` + const result = await this.makeTeamsRequest(endpoint, 'POST', body) + + return this.formatResponse( + { + success: true, + channel: result, + message: `Channel "${displayName}" created successfully` + }, + params + ) + } catch (error) { + return this.formatResponse(`Error creating channel: ${error}`, params) + } + } +} + +class UpdateChannelTool extends BaseTeamsTool { + constructor(args: { accessToken?: string; defaultParams?: any }) { + const toolInput: DynamicStructuredToolInput = { + name: 'update_channel', + description: 'Update an existing channel', + schema: z.object({ + teamId: z.string().describe('ID of the team that contains the channel'), + channelId: z.string().describe('ID of the channel to update'), + displayName: z.string().optional().describe('New display name of the channel'), + description: z.string().optional().describe('New description of the channel') + }), + baseUrl: BASE_URL, + method: 'PATCH', + headers: {} + } + + super({ ...toolInput, accessToken: args.accessToken, defaultParams: args.defaultParams }) + } + + protected async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const { teamId, channelId, displayName, description } = params + + if (!teamId || !channelId) { + throw new Error('Both Team ID and Channel ID are required') + } + + try { + const body: any = {} + if (displayName) body.displayName = displayName + if (description) body.description = description + + if (Object.keys(body).length === 0) { + throw new Error('At least one field to update must be provided') + } + + const endpoint = `/teams/${teamId}/channels/${channelId}` + await this.makeTeamsRequest(endpoint, 
'PATCH', body) + + return this.formatResponse( + { + success: true, + message: 'Channel updated successfully' + }, + params + ) + } catch (error) { + return this.formatResponse(`Error updating channel: ${error}`, params) + } + } +} + +class DeleteChannelTool extends BaseTeamsTool { + constructor(args: { accessToken?: string; defaultParams?: any }) { + const toolInput: DynamicStructuredToolInput = { + name: 'delete_channel', + description: 'Delete a channel from a team', + schema: z.object({ + teamId: z.string().describe('ID of the team that contains the channel'), + channelId: z.string().describe('ID of the channel to delete') + }), + baseUrl: BASE_URL, + method: 'DELETE', + headers: {} + } + + super({ ...toolInput, accessToken: args.accessToken, defaultParams: args.defaultParams }) + } + + protected async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const { teamId, channelId } = params + + if (!teamId || !channelId) { + throw new Error('Both Team ID and Channel ID are required') + } + + try { + const endpoint = `/teams/${teamId}/channels/${channelId}` + await this.makeTeamsRequest(endpoint, 'DELETE') + + return this.formatResponse( + { + success: true, + message: 'Channel deleted successfully' + }, + params + ) + } catch (error) { + return this.formatResponse(`Error deleting channel: ${error}`, params) + } + } +} + +class ArchiveChannelTool extends BaseTeamsTool { + constructor(args: { accessToken?: string; defaultParams?: any }) { + const toolInput: DynamicStructuredToolInput = { + name: 'archive_channel', + description: 'Archive a channel in a team', + schema: z.object({ + teamId: z.string().describe('ID of the team that contains the channel'), + channelId: z.string().describe('ID of the channel to archive') + }), + baseUrl: BASE_URL, + method: 'POST', + headers: {} + } + + super({ ...toolInput, accessToken: args.accessToken, defaultParams: args.defaultParams }) + } + + protected async _call(arg: any): Promise { + const params 
= { ...arg, ...this.defaultParams } + const { teamId, channelId } = params + + if (!teamId || !channelId) { + throw new Error('Both Team ID and Channel ID are required') + } + + try { + const endpoint = `/teams/${teamId}/channels/${channelId}/archive` + await this.makeTeamsRequest(endpoint, 'POST', {}) + + return this.formatResponse( + { + success: true, + message: 'Channel archived successfully' + }, + params + ) + } catch (error) { + return this.formatResponse(`Error archiving channel: ${error}`, params) + } + } +} + +class UnarchiveChannelTool extends BaseTeamsTool { + constructor(args: { accessToken?: string; defaultParams?: any }) { + const toolInput: DynamicStructuredToolInput = { + name: 'unarchive_channel', + description: 'Unarchive a channel in a team', + schema: z.object({ + teamId: z.string().describe('ID of the team that contains the channel'), + channelId: z.string().describe('ID of the channel to unarchive') + }), + baseUrl: BASE_URL, + method: 'POST', + headers: {} + } + + super({ ...toolInput, accessToken: args.accessToken, defaultParams: args.defaultParams }) + } + + protected async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const { teamId, channelId } = params + + if (!teamId || !channelId) { + throw new Error('Both Team ID and Channel ID are required') + } + + try { + const endpoint = `/teams/${teamId}/channels/${channelId}/unarchive` + await this.makeTeamsRequest(endpoint, 'POST', {}) + + return this.formatResponse( + { + success: true, + message: 'Channel unarchived successfully' + }, + params + ) + } catch (error) { + return this.formatResponse(`Error unarchiving channel: ${error}`, params) + } + } +} + +class ListChannelMembersTool extends BaseTeamsTool { + constructor(args: { accessToken?: string; defaultParams?: any }) { + const toolInput: DynamicStructuredToolInput = { + name: 'list_channel_members', + description: 'List members of a channel', + schema: z.object({ + teamId: z.string().describe('ID of 
the team that contains the channel'), + channelId: z.string().describe('ID of the channel') + }), + baseUrl: BASE_URL, + method: 'GET', + headers: {} + } + + super({ ...toolInput, accessToken: args.accessToken, defaultParams: args.defaultParams }) + } + + protected async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const { teamId, channelId } = params + + if (!teamId || !channelId) { + throw new Error('Both Team ID and Channel ID are required') + } + + try { + const endpoint = `/teams/${teamId}/channels/${channelId}/members` + const result = await this.makeTeamsRequest(endpoint) + + return this.formatResponse( + { + success: true, + members: result.value || [], + count: result.value?.length || 0 + }, + params + ) + } catch (error) { + return this.formatResponse(`Error listing channel members: ${error}`, params) + } + } +} + +class AddChannelMemberTool extends BaseTeamsTool { + constructor(args: { accessToken?: string; defaultParams?: any }) { + const toolInput: DynamicStructuredToolInput = { + name: 'add_channel_member', + description: 'Add a member to a channel', + schema: z.object({ + teamId: z.string().describe('ID of the team that contains the channel'), + channelId: z.string().describe('ID of the channel'), + userId: z.string().describe('ID of the user to add') + }), + baseUrl: BASE_URL, + method: 'POST', + headers: {} + } + + super({ ...toolInput, accessToken: args.accessToken, defaultParams: args.defaultParams }) + } + + protected async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const { teamId, channelId, userId } = params + + if (!teamId || !channelId || !userId) { + throw new Error('Team ID, Channel ID, and User ID are all required') + } + + try { + const body = { + '@odata.type': '#microsoft.graph.aadUserConversationMember', + 'user@odata.bind': `https://graph.microsoft.com/v1.0/users('${userId}')` + } + + const endpoint = `/teams/${teamId}/channels/${channelId}/members` + await 
this.makeTeamsRequest(endpoint, 'POST', body) + + return this.formatResponse( + { + success: true, + message: 'Member added to channel successfully' + }, + params + ) + } catch (error) { + return this.formatResponse(`Error adding channel member: ${error}`, params) + } + } +} + +class RemoveChannelMemberTool extends BaseTeamsTool { + constructor(args: { accessToken?: string; defaultParams?: any }) { + const toolInput: DynamicStructuredToolInput = { + name: 'remove_channel_member', + description: 'Remove a member from a channel', + schema: z.object({ + teamId: z.string().describe('ID of the team that contains the channel'), + channelId: z.string().describe('ID of the channel'), + userId: z.string().describe('ID of the user to remove') + }), + baseUrl: BASE_URL, + method: 'DELETE', + headers: {} + } + + super({ ...toolInput, accessToken: args.accessToken, defaultParams: args.defaultParams }) + } + + protected async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const { teamId, channelId, userId } = params + + if (!teamId || !channelId || !userId) { + throw new Error('Team ID, Channel ID, and User ID are all required') + } + + try { + // First get the membership ID + const membersEndpoint = `/teams/${teamId}/channels/${channelId}/members` + const membersResult = await this.makeTeamsRequest(membersEndpoint) + + const member = membersResult.value?.find((m: any) => m.userId === userId) + if (!member) { + throw new Error('User is not a member of this channel') + } + + const endpoint = `/teams/${teamId}/channels/${channelId}/members/${member.id}` + await this.makeTeamsRequest(endpoint, 'DELETE') + + return this.formatResponse( + { + success: true, + message: 'Member removed from channel successfully' + }, + params + ) + } catch (error) { + return this.formatResponse(`Error removing channel member: ${error}`, params) + } + } +} + +// CHAT TOOLS + +class ListChatsTool extends BaseTeamsTool { + constructor(args: { accessToken?: string; 
defaultParams?: any }) { + const toolInput: DynamicStructuredToolInput = { + name: 'list_chats', + description: 'List all chats for the current user', + schema: z.object({ + maxResults: z.number().optional().default(50).describe('Maximum number of chats to return') + }), + baseUrl: BASE_URL, + method: 'GET', + headers: {} + } + + super({ ...toolInput, accessToken: args.accessToken, defaultParams: args.defaultParams }) + } + + protected async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const { maxResults = 50 } = params + + try { + const endpoint = `/me/chats?$top=${maxResults}` + const result = await this.makeTeamsRequest(endpoint) + + return this.formatResponse( + { + success: true, + chats: result.value || [], + count: result.value?.length || 0 + }, + params + ) + } catch (error) { + return this.formatResponse(`Error listing chats: ${error}`, params) + } + } +} + +class GetChatTool extends BaseTeamsTool { + constructor(args: { accessToken?: string; defaultParams?: any }) { + const toolInput: DynamicStructuredToolInput = { + name: 'get_chat', + description: 'Get details of a specific chat', + schema: z.object({ + chatId: z.string().describe('ID of the chat to retrieve') + }), + baseUrl: BASE_URL, + method: 'GET', + headers: {} + } + + super({ ...toolInput, accessToken: args.accessToken, defaultParams: args.defaultParams }) + } + + protected async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const { chatId } = params + + if (!chatId) { + throw new Error('Chat ID is required') + } + + try { + const endpoint = `/chats/${chatId}` + const result = await this.makeTeamsRequest(endpoint) + + return this.formatResponse( + { + success: true, + chat: result + }, + params + ) + } catch (error) { + return this.formatResponse(`Error getting chat: ${error}`, params) + } + } +} + +class CreateChatTool extends BaseTeamsTool { + constructor(args: { accessToken?: string; defaultParams?: any }) { + const 
toolInput: DynamicStructuredToolInput = { + name: 'create_chat', + description: 'Create a new chat', + schema: z.object({ + chatType: z.enum(['oneOnOne', 'group']).optional().default('group').describe('Type of chat to create'), + topic: z.string().optional().describe('Topic/subject of the chat (for group chats)'), + members: z.string().describe('Comma-separated list of user IDs to add to the chat') + }), + baseUrl: BASE_URL, + method: 'POST', + headers: {} + } + + super({ ...toolInput, accessToken: args.accessToken, defaultParams: args.defaultParams }) + } + + protected async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const { chatType = 'group', topic, members } = params + + if (!members) { + throw new Error('Members list is required to create a chat') + } + + try { + const memberIds = members.split(',').map((id: string) => id.trim()) + const chatMembers = memberIds.map((userId: string) => ({ + '@odata.type': '#microsoft.graph.aadUserConversationMember', + 'user@odata.bind': `https://graph.microsoft.com/v1.0/users('${userId}')` + })) + + const body: any = { + chatType, + members: chatMembers + } + + if (topic && chatType === 'group') { + body.topic = topic + } + + const endpoint = '/chats' + const result = await this.makeTeamsRequest(endpoint, 'POST', body) + + return this.formatResponse( + { + success: true, + chat: result, + message: 'Chat created successfully' + }, + params + ) + } catch (error) { + return this.formatResponse(`Error creating chat: ${error}`, params) + } + } +} + +class UpdateChatTool extends BaseTeamsTool { + constructor(args: { accessToken?: string; defaultParams?: any }) { + const toolInput: DynamicStructuredToolInput = { + name: 'update_chat', + description: 'Update an existing chat', + schema: z.object({ + chatId: z.string().describe('ID of the chat to update'), + topic: z.string().describe('New topic/subject of the chat') + }), + baseUrl: BASE_URL, + method: 'PATCH', + headers: {} + } + + super({ 
...toolInput, accessToken: args.accessToken, defaultParams: args.defaultParams }) + } + + protected async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const { chatId, topic } = params + + if (!chatId) { + throw new Error('Chat ID is required') + } + + if (!topic) { + throw new Error('Topic is required to update a chat') + } + + try { + const body = { topic } + const endpoint = `/chats/${chatId}` + await this.makeTeamsRequest(endpoint, 'PATCH', body) + + return this.formatResponse( + { + success: true, + message: 'Chat updated successfully' + }, + params + ) + } catch (error) { + return this.formatResponse(`Error updating chat: ${error}`, params) + } + } +} + +class DeleteChatTool extends BaseTeamsTool { + constructor(args: { accessToken?: string; defaultParams?: any }) { + const toolInput: DynamicStructuredToolInput = { + name: 'delete_chat', + description: 'Delete a chat', + schema: z.object({ + chatId: z.string().describe('ID of the chat to delete') + }), + baseUrl: BASE_URL, + method: 'DELETE', + headers: {} + } + + super({ ...toolInput, accessToken: args.accessToken, defaultParams: args.defaultParams }) + } + + protected async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const { chatId } = params + + if (!chatId) { + throw new Error('Chat ID is required') + } + + try { + const endpoint = `/chats/${chatId}` + await this.makeTeamsRequest(endpoint, 'DELETE') + + return this.formatResponse( + { + success: true, + message: 'Chat deleted successfully' + }, + params + ) + } catch (error) { + return this.formatResponse(`Error deleting chat: ${error}`, params) + } + } +} + +class ListChatMembersTool extends BaseTeamsTool { + constructor(args: { accessToken?: string; defaultParams?: any }) { + const toolInput: DynamicStructuredToolInput = { + name: 'list_chat_members', + description: 'List members of a chat', + schema: z.object({ + chatId: z.string().describe('ID of the chat') + }), + baseUrl: 
BASE_URL, + method: 'GET', + headers: {} + } + + super({ ...toolInput, accessToken: args.accessToken, defaultParams: args.defaultParams }) + } + + protected async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const { chatId } = params + + if (!chatId) { + throw new Error('Chat ID is required') + } + + try { + const endpoint = `/chats/${chatId}/members` + const result = await this.makeTeamsRequest(endpoint) + + return this.formatResponse( + { + success: true, + members: result.value || [], + count: result.value?.length || 0 + }, + params + ) + } catch (error) { + return this.formatResponse(`Error listing chat members: ${error}`, params) + } + } +} + +class AddChatMemberTool extends BaseTeamsTool { + constructor(args: { accessToken?: string; defaultParams?: any }) { + const toolInput: DynamicStructuredToolInput = { + name: 'add_chat_member', + description: 'Add a member to a chat', + schema: z.object({ + chatId: z.string().describe('ID of the chat'), + userId: z.string().describe('ID of the user to add') + }), + baseUrl: BASE_URL, + method: 'POST', + headers: {} + } + + super({ ...toolInput, accessToken: args.accessToken, defaultParams: args.defaultParams }) + } + + protected async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const { chatId, userId } = params + + if (!chatId || !userId) { + throw new Error('Both Chat ID and User ID are required') + } + + try { + const body = { + '@odata.type': '#microsoft.graph.aadUserConversationMember', + 'user@odata.bind': `https://graph.microsoft.com/v1.0/users('${userId}')` + } + + const endpoint = `/chats/${chatId}/members` + await this.makeTeamsRequest(endpoint, 'POST', body) + + return this.formatResponse( + { + success: true, + message: 'Member added to chat successfully' + }, + params + ) + } catch (error) { + return this.formatResponse(`Error adding chat member: ${error}`, params) + } + } +} + +class RemoveChatMemberTool extends BaseTeamsTool { + 
constructor(args: { accessToken?: string; defaultParams?: any }) { + const toolInput: DynamicStructuredToolInput = { + name: 'remove_chat_member', + description: 'Remove a member from a chat', + schema: z.object({ + chatId: z.string().describe('ID of the chat'), + userId: z.string().describe('ID of the user to remove') + }), + baseUrl: BASE_URL, + method: 'DELETE', + headers: {} + } + + super({ ...toolInput, accessToken: args.accessToken, defaultParams: args.defaultParams }) + } + + protected async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const { chatId, userId } = params + + if (!chatId || !userId) { + throw new Error('Both Chat ID and User ID are required') + } + + try { + // First get the membership ID + const membersEndpoint = `/chats/${chatId}/members` + const membersResult = await this.makeTeamsRequest(membersEndpoint) + + const member = membersResult.value?.find((m: any) => m.userId === userId) + if (!member) { + throw new Error('User is not a member of this chat') + } + + const endpoint = `/chats/${chatId}/members/${member.id}` + await this.makeTeamsRequest(endpoint, 'DELETE') + + return this.formatResponse( + { + success: true, + message: 'Member removed from chat successfully' + }, + params + ) + } catch (error) { + return this.formatResponse(`Error removing chat member: ${error}`, params) + } + } +} + +class PinMessageTool extends BaseTeamsTool { + constructor(args: { accessToken?: string; defaultParams?: any }) { + const toolInput: DynamicStructuredToolInput = { + name: 'pin_message', + description: 'Pin a message in a chat', + schema: z.object({ + chatId: z.string().describe('ID of the chat'), + messageId: z.string().describe('ID of the message to pin') + }), + baseUrl: BASE_URL, + method: 'POST', + headers: {} + } + + super({ ...toolInput, accessToken: args.accessToken, defaultParams: args.defaultParams }) + } + + protected async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + 
const { chatId, messageId } = params + + if (!chatId || !messageId) { + throw new Error('Both Chat ID and Message ID are required') + } + + try { + const body = { + message: { + '@odata.bind': `https://graph.microsoft.com/v1.0/chats('${chatId}')/messages('${messageId}')` + } + } + + const endpoint = `/chats/${chatId}/pinnedMessages` + await this.makeTeamsRequest(endpoint, 'POST', body) + + return this.formatResponse( + { + success: true, + message: 'Message pinned successfully' + }, + params + ) + } catch (error) { + return this.formatResponse(`Error pinning message: ${error}`, params) + } + } +} + +class UnpinMessageTool extends BaseTeamsTool { + constructor(args: { accessToken?: string; defaultParams?: any }) { + const toolInput: DynamicStructuredToolInput = { + name: 'unpin_message', + description: 'Unpin a message from a chat', + schema: z.object({ + chatId: z.string().describe('ID of the chat'), + messageId: z.string().describe('ID of the message to unpin') + }), + baseUrl: BASE_URL, + method: 'DELETE', + headers: {} + } + + super({ ...toolInput, accessToken: args.accessToken, defaultParams: args.defaultParams }) + } + + protected async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const { chatId, messageId } = params + + if (!chatId || !messageId) { + throw new Error('Both Chat ID and Message ID are required') + } + + try { + // First get the pinned messages to find the pinned message ID + const pinnedEndpoint = `/chats/${chatId}/pinnedMessages` + const pinnedResult = await this.makeTeamsRequest(pinnedEndpoint) + + const pinnedMessage = pinnedResult.value?.find((pm: any) => pm.message?.id === messageId) + if (!pinnedMessage) { + throw new Error('Message is not pinned in this chat') + } + + const endpoint = `/chats/${chatId}/pinnedMessages/${pinnedMessage.id}` + await this.makeTeamsRequest(endpoint, 'DELETE') + + return this.formatResponse( + { + success: true, + message: 'Message unpinned successfully' + }, + params + ) + } 
catch (error) { + return this.formatResponse(`Error unpinning message: ${error}`, params) + } + } +} + +// CHAT MESSAGE TOOLS + +class ListMessagesTool extends BaseTeamsTool { + constructor(args: { accessToken?: string; defaultParams?: any }) { + const toolInput: DynamicStructuredToolInput = { + name: 'list_messages', + description: 'List messages in a chat or channel', + schema: z.object({ + chatChannelId: z.string().describe('ID of the chat or channel to list messages from'), + teamId: z.string().optional().describe('ID of the team (required for channel messages)'), + maxResults: z.number().optional().default(50).describe('Maximum number of messages to return') + }), + baseUrl: BASE_URL, + method: 'GET', + headers: {} + } + + super({ ...toolInput, accessToken: args.accessToken, defaultParams: args.defaultParams }) + } + + protected async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const { chatChannelId, teamId, maxResults = 50 } = params + + if (!chatChannelId) { + throw new Error('Chat or Channel ID is required') + } + + try { + let endpoint: string + if (teamId) { + // Channel messages + endpoint = `/teams/${teamId}/channels/${chatChannelId}/messages?$top=${maxResults}` + } else { + // Chat messages + endpoint = `/chats/${chatChannelId}/messages?$top=${maxResults}` + } + + const result = await this.makeTeamsRequest(endpoint) + + return this.formatResponse( + { + success: true, + messages: result.value || [], + count: result.value?.length || 0, + context: teamId ? 
'channel' : 'chat' + }, + params + ) + } catch (error) { + return this.formatResponse(`Error listing messages: ${error}`, params) + } + } +} + +class GetMessageTool extends BaseTeamsTool { + constructor(args: { accessToken?: string; defaultParams?: any }) { + const toolInput: DynamicStructuredToolInput = { + name: 'get_message', + description: 'Get details of a specific message', + schema: z.object({ + chatChannelId: z.string().describe('ID of the chat or channel'), + teamId: z.string().optional().describe('ID of the team (required for channel messages)'), + messageId: z.string().describe('ID of the message to retrieve') + }), + baseUrl: BASE_URL, + method: 'GET', + headers: {} + } + + super({ ...toolInput, accessToken: args.accessToken, defaultParams: args.defaultParams }) + } + + protected async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const { chatChannelId, teamId, messageId } = params + + if (!chatChannelId || !messageId) { + throw new Error('Chat/Channel ID and Message ID are required') + } + + try { + let endpoint: string + if (teamId) { + // Channel message + endpoint = `/teams/${teamId}/channels/${chatChannelId}/messages/${messageId}` + } else { + // Chat message + endpoint = `/chats/${chatChannelId}/messages/${messageId}` + } + + const result = await this.makeTeamsRequest(endpoint) + + return this.formatResponse( + { + success: true, + message: result, + context: teamId ? 
'channel' : 'chat' + }, + params + ) + } catch (error) { + return this.formatResponse(`Error getting message: ${error}`, params) + } + } +} + +class SendMessageTool extends BaseTeamsTool { + constructor(args: { accessToken?: string; defaultParams?: any }) { + const toolInput: DynamicStructuredToolInput = { + name: 'send_message', + description: 'Send a message to a chat or channel', + schema: z.object({ + chatChannelId: z.string().describe('ID of the chat or channel to send message to'), + teamId: z.string().optional().describe('ID of the team (required for channel messages)'), + messageBody: z.string().describe('Content of the message'), + contentType: z.enum(['text', 'html']).optional().default('text').describe('Content type of the message') + }), + baseUrl: BASE_URL, + method: 'POST', + headers: {} + } + + super({ ...toolInput, accessToken: args.accessToken, defaultParams: args.defaultParams }) + } + + protected async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const { chatChannelId, teamId, messageBody, contentType = 'text' } = params + + if (!chatChannelId || !messageBody) { + throw new Error('Chat/Channel ID and Message Body are required') + } + + try { + const body = { + body: { + contentType, + content: messageBody + } + } + + let endpoint: string + if (teamId) { + // Channel message + endpoint = `/teams/${teamId}/channels/${chatChannelId}/messages` + } else { + // Chat message + endpoint = `/chats/${chatChannelId}/messages` + } + + const result = await this.makeTeamsRequest(endpoint, 'POST', body) + + return this.formatResponse( + { + success: true, + message: result, + context: teamId ? 
'channel' : 'chat', + messageText: 'Message sent successfully' + }, + params + ) + } catch (error) { + return this.formatResponse(`Error sending message: ${error}`, params) + } + } +} + +class UpdateMessageTool extends BaseTeamsTool { + constructor(args: { accessToken?: string; defaultParams?: any }) { + const toolInput: DynamicStructuredToolInput = { + name: 'update_message', + description: 'Update an existing message', + schema: z.object({ + chatChannelId: z.string().describe('ID of the chat or channel'), + teamId: z.string().optional().describe('ID of the team (required for channel messages)'), + messageId: z.string().describe('ID of the message to update') + }), + baseUrl: BASE_URL, + method: 'PATCH', + headers: {} + } + + super({ ...toolInput, accessToken: args.accessToken, defaultParams: args.defaultParams }) + } + + protected async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const { chatChannelId, teamId, messageId } = params + + if (!chatChannelId || !messageId) { + throw new Error('Chat/Channel ID and Message ID are required') + } + + try { + // Note: Message update is primarily for policy violations in Teams + const body = { + policyViolation: null + } + + let endpoint: string + if (teamId) { + // Channel message + endpoint = `/teams/${teamId}/channels/${chatChannelId}/messages/${messageId}` + } else { + // Chat message + endpoint = `/chats/${chatChannelId}/messages/${messageId}` + } + + await this.makeTeamsRequest(endpoint, 'PATCH', body) + + return this.formatResponse( + { + success: true, + message: 'Message updated successfully', + context: teamId ? 
'channel' : 'chat' + }, + params + ) + } catch (error) { + return this.formatResponse(`Error updating message: ${error}`, params) + } + } +} + +class DeleteMessageTool extends BaseTeamsTool { + constructor(args: { accessToken?: string; defaultParams?: any }) { + const toolInput: DynamicStructuredToolInput = { + name: 'delete_message', + description: 'Delete a message', + schema: z.object({ + chatChannelId: z.string().describe('ID of the chat or channel'), + teamId: z.string().optional().describe('ID of the team (required for channel messages)'), + messageId: z.string().describe('ID of the message to delete') + }), + baseUrl: BASE_URL, + method: 'DELETE', + headers: {} + } + + super({ ...toolInput, accessToken: args.accessToken, defaultParams: args.defaultParams }) + } + + protected async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const { chatChannelId, teamId, messageId } = params + + if (!chatChannelId || !messageId) { + throw new Error('Chat/Channel ID and Message ID are required') + } + + try { + let endpoint: string + if (teamId) { + // Channel message - use soft delete + endpoint = `/teams/${teamId}/channels/${chatChannelId}/messages/${messageId}/softDelete` + } else { + // Chat message - use soft delete + endpoint = `/chats/${chatChannelId}/messages/${messageId}/softDelete` + } + + await this.makeTeamsRequest(endpoint, 'POST', {}) + + return this.formatResponse( + { + success: true, + message: 'Message deleted successfully', + context: teamId ? 
'channel' : 'chat' + }, + params + ) + } catch (error) { + return this.formatResponse(`Error deleting message: ${error}`, params) + } + } +} + +class ReplyToMessageTool extends BaseTeamsTool { + constructor(args: { accessToken?: string; defaultParams?: any }) { + const toolInput: DynamicStructuredToolInput = { + name: 'reply_to_message', + description: 'Reply to a message in a chat or channel', + schema: z.object({ + chatChannelId: z.string().describe('ID of the chat or channel'), + teamId: z.string().optional().describe('ID of the team (required for channel messages)'), + messageId: z.string().describe('ID of the message to reply to'), + replyBody: z.string().describe('Content of the reply'), + contentType: z.enum(['text', 'html']).optional().default('text').describe('Content type of the reply') + }), + baseUrl: BASE_URL, + method: 'POST', + headers: {} + } + + super({ ...toolInput, accessToken: args.accessToken, defaultParams: args.defaultParams }) + } + + protected async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const { chatChannelId, teamId, messageId, replyBody, contentType = 'text' } = params + + if (!chatChannelId || !messageId || !replyBody) { + throw new Error('Chat/Channel ID, Message ID, and Reply Body are required') + } + + try { + const body = { + body: { + contentType, + content: replyBody + } + } + + let endpoint: string + if (teamId) { + // Channel message reply + endpoint = `/teams/${teamId}/channels/${chatChannelId}/messages/${messageId}/replies` + } else { + // For chat messages, replies are just new messages + endpoint = `/chats/${chatChannelId}/messages` + } + + const result = await this.makeTeamsRequest(endpoint, 'POST', body) + + return this.formatResponse( + { + success: true, + reply: result, + message: 'Reply sent successfully', + context: teamId ? 
'channel' : 'chat' + }, + params + ) + } catch (error) { + return this.formatResponse(`Error replying to message: ${error}`, params) + } + } +} + +class SetReactionTool extends BaseTeamsTool { + constructor(args: { accessToken?: string; defaultParams?: any }) { + const toolInput: DynamicStructuredToolInput = { + name: 'set_reaction', + description: 'Set a reaction to a message', + schema: z.object({ + chatChannelId: z.string().describe('ID of the chat or channel'), + teamId: z.string().optional().describe('ID of the team (required for channel messages)'), + messageId: z.string().describe('ID of the message to react to'), + reactionType: z + .enum(['like', 'heart', 'laugh', 'surprised', 'sad', 'angry']) + .optional() + .default('like') + .describe('Type of reaction to set') + }), + baseUrl: BASE_URL, + method: 'POST', + headers: {} + } + + super({ ...toolInput, accessToken: args.accessToken, defaultParams: args.defaultParams }) + } + + protected async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const { chatChannelId, teamId, messageId, reactionType = 'like' } = params + + if (!chatChannelId || !messageId) { + throw new Error('Chat/Channel ID and Message ID are required') + } + + try { + let endpoint: string + if (teamId) { + // Channel message + endpoint = `/teams/${teamId}/channels/${chatChannelId}/messages/${messageId}/setReaction` + } else { + // Chat message + endpoint = `/chats/${chatChannelId}/messages/${messageId}/setReaction` + } + + const body = { + reactionType + } + + await this.makeTeamsRequest(endpoint, 'POST', body) + + return this.formatResponse( + { + success: true, + message: `Reaction "${reactionType}" set successfully`, + context: teamId ? 
'channel' : 'chat' + }, + params + ) + } catch (error) { + return this.formatResponse(`Error setting reaction: ${error}`, params) + } + } +} + +class UnsetReactionTool extends BaseTeamsTool { + constructor(args: { accessToken?: string; defaultParams?: any }) { + const toolInput: DynamicStructuredToolInput = { + name: 'unset_reaction', + description: 'Remove a reaction from a message', + schema: z.object({ + chatChannelId: z.string().describe('ID of the chat or channel'), + teamId: z.string().optional().describe('ID of the team (required for channel messages)'), + messageId: z.string().describe('ID of the message to remove reaction from'), + reactionType: z + .enum(['like', 'heart', 'laugh', 'surprised', 'sad', 'angry']) + .optional() + .default('like') + .describe('Type of reaction to remove') + }), + baseUrl: BASE_URL, + method: 'POST', + headers: {} + } + + super({ ...toolInput, accessToken: args.accessToken, defaultParams: args.defaultParams }) + } + + protected async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const { chatChannelId, teamId, messageId, reactionType = 'like' } = params + + if (!chatChannelId || !messageId) { + throw new Error('Chat/Channel ID and Message ID are required') + } + + try { + let endpoint: string + if (teamId) { + // Channel message + endpoint = `/teams/${teamId}/channels/${chatChannelId}/messages/${messageId}/unsetReaction` + } else { + // Chat message + endpoint = `/chats/${chatChannelId}/messages/${messageId}/unsetReaction` + } + + const body = { + reactionType + } + + await this.makeTeamsRequest(endpoint, 'POST', body) + + return this.formatResponse( + { + success: true, + message: `Reaction "${reactionType}" removed successfully`, + context: teamId ? 
'channel' : 'chat' + }, + params + ) + } catch (error) { + return this.formatResponse(`Error unsetting reaction: ${error}`, params) + } + } +} + +class GetAllMessagesTool extends BaseTeamsTool { + constructor(args: { accessToken?: string; defaultParams?: any }) { + const toolInput: DynamicStructuredToolInput = { + name: 'get_all_messages', + description: 'Get messages across all chats and channels for the user', + schema: z.object({ + maxResults: z.number().optional().default(50).describe('Maximum number of messages to return') + }), + baseUrl: BASE_URL, + method: 'GET', + headers: {} + } + + super({ ...toolInput, accessToken: args.accessToken, defaultParams: args.defaultParams }) + } + + protected async _call(arg: any): Promise { + const params = { ...arg, ...this.defaultParams } + const { maxResults = 50 } = params + + try { + // Get messages from all chats + const chatEndpoint = `/me/chats/getAllMessages?$top=${maxResults}` + const chatResult = await this.makeTeamsRequest(chatEndpoint) + + return this.formatResponse( + { + success: true, + messages: chatResult.value || [], + count: chatResult.value?.length || 0, + source: 'all_chats_and_channels' + }, + params + ) + } catch (error) { + return this.formatResponse(`Error getting all messages: ${error}`, params) + } + } +} + +// Main function to create Teams tools +export function createTeamsTools(options: TeamsToolOptions): DynamicStructuredTool[] { + const tools: DynamicStructuredTool[] = [] + const actions = options.actions || [] + const accessToken = options.accessToken || '' + const defaultParams = options.defaultParams || {} + + // Channel tools + if (actions.includes('listChannels')) { + const listTool = new ListChannelsTool({ accessToken, defaultParams }) + tools.push(listTool) + } + + if (actions.includes('getChannel')) { + const getTool = new GetChannelTool({ accessToken, defaultParams }) + tools.push(getTool) + } + + if (actions.includes('createChannel')) { + const createTool = new CreateChannelTool({ 
accessToken, defaultParams }) + tools.push(createTool) + } + + if (actions.includes('updateChannel')) { + const updateTool = new UpdateChannelTool({ accessToken, defaultParams }) + tools.push(updateTool) + } + + if (actions.includes('deleteChannel')) { + const deleteTool = new DeleteChannelTool({ accessToken, defaultParams }) + tools.push(deleteTool) + } + + if (actions.includes('archiveChannel')) { + const archiveTool = new ArchiveChannelTool({ accessToken, defaultParams }) + tools.push(archiveTool) + } + + if (actions.includes('unarchiveChannel')) { + const unarchiveTool = new UnarchiveChannelTool({ accessToken, defaultParams }) + tools.push(unarchiveTool) + } + + if (actions.includes('listChannelMembers')) { + const listMembersTool = new ListChannelMembersTool({ accessToken, defaultParams }) + tools.push(listMembersTool) + } + + if (actions.includes('addChannelMember')) { + const addMemberTool = new AddChannelMemberTool({ accessToken, defaultParams }) + tools.push(addMemberTool) + } + + if (actions.includes('removeChannelMember')) { + const removeMemberTool = new RemoveChannelMemberTool({ accessToken, defaultParams }) + tools.push(removeMemberTool) + } + + // Chat tools + if (actions.includes('listChats')) { + const listTool = new ListChatsTool({ accessToken, defaultParams }) + tools.push(listTool) + } + + if (actions.includes('getChat')) { + const getTool = new GetChatTool({ accessToken, defaultParams }) + tools.push(getTool) + } + + if (actions.includes('createChat')) { + const createTool = new CreateChatTool({ accessToken, defaultParams }) + tools.push(createTool) + } + + if (actions.includes('updateChat')) { + const updateTool = new UpdateChatTool({ accessToken, defaultParams }) + tools.push(updateTool) + } + + if (actions.includes('deleteChat')) { + const deleteTool = new DeleteChatTool({ accessToken, defaultParams }) + tools.push(deleteTool) + } + + if (actions.includes('listChatMembers')) { + const listMembersTool = new ListChatMembersTool({ accessToken, 
defaultParams }) + tools.push(listMembersTool) + } + + if (actions.includes('addChatMember')) { + const addMemberTool = new AddChatMemberTool({ accessToken, defaultParams }) + tools.push(addMemberTool) + } + + if (actions.includes('removeChatMember')) { + const removeMemberTool = new RemoveChatMemberTool({ accessToken, defaultParams }) + tools.push(removeMemberTool) + } + + if (actions.includes('pinMessage')) { + const pinTool = new PinMessageTool({ accessToken, defaultParams }) + tools.push(pinTool) + } + + if (actions.includes('unpinMessage')) { + const unpinTool = new UnpinMessageTool({ accessToken, defaultParams }) + tools.push(unpinTool) + } + + // Chat message tools + if (actions.includes('listMessages')) { + const listTool = new ListMessagesTool({ accessToken, defaultParams }) + tools.push(listTool) + } + + if (actions.includes('getMessage')) { + const getTool = new GetMessageTool({ accessToken, defaultParams }) + tools.push(getTool) + } + + if (actions.includes('sendMessage')) { + const sendTool = new SendMessageTool({ accessToken, defaultParams }) + tools.push(sendTool) + } + + if (actions.includes('updateMessage')) { + const updateTool = new UpdateMessageTool({ accessToken, defaultParams }) + tools.push(updateTool) + } + + if (actions.includes('deleteMessage')) { + const deleteTool = new DeleteMessageTool({ accessToken, defaultParams }) + tools.push(deleteTool) + } + + if (actions.includes('replyToMessage')) { + const replyTool = new ReplyToMessageTool({ accessToken, defaultParams }) + tools.push(replyTool) + } + + if (actions.includes('setReaction')) { + const reactionTool = new SetReactionTool({ accessToken, defaultParams }) + tools.push(reactionTool) + } + + if (actions.includes('unsetReaction')) { + const unsetReactionTool = new UnsetReactionTool({ accessToken, defaultParams }) + tools.push(unsetReactionTool) + } + + if (actions.includes('getAllMessages')) { + const getAllTool = new GetAllMessagesTool({ accessToken, defaultParams }) + 
tools.push(getAllTool) + } + + return tools +} diff --git a/packages/components/nodes/tools/MicrosoftTeams/teams.svg b/packages/components/nodes/tools/MicrosoftTeams/teams.svg new file mode 100644 index 000000000..f3a03a3be --- /dev/null +++ b/packages/components/nodes/tools/MicrosoftTeams/teams.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/packages/components/nodes/tools/OpenAPIToolkit/OpenAPIToolkit.ts b/packages/components/nodes/tools/OpenAPIToolkit/OpenAPIToolkit.ts index d44f5f103..5f0a8bc20 100644 --- a/packages/components/nodes/tools/OpenAPIToolkit/OpenAPIToolkit.ts +++ b/packages/components/nodes/tools/OpenAPIToolkit/OpenAPIToolkit.ts @@ -5,6 +5,7 @@ import $RefParser from '@apidevtools/json-schema-ref-parser' import { z, ZodSchema, ZodTypeAny } from 'zod' import { defaultCode, DynamicStructuredTool, howToUseCode } from './core' import { DataSource } from 'typeorm' +import fetch from 'node-fetch' class OpenAPIToolkit_Tools implements INode { label: string @@ -21,17 +22,64 @@ class OpenAPIToolkit_Tools implements INode { constructor() { this.label = 'OpenAPI Toolkit' this.name = 'openAPIToolkit' - this.version = 2.0 + this.version = 2.1 this.type = 'OpenAPIToolkit' this.icon = 'openapi.svg' this.category = 'Tools' this.description = 'Load OpenAPI specification, and converts each API endpoint to a tool' this.inputs = [ { - label: 'YAML File', - name: 'yamlFile', + label: 'Input Type', + name: 'inputType', + type: 'options', + options: [ + { + label: 'Upload File', + name: 'file' + }, + { + label: 'Provide Link', + name: 'link' + } + ], + default: 'file', + description: 'Choose how to provide the OpenAPI specification' + }, + { + label: 'OpenAPI File', + name: 'openApiFile', type: 'file', - fileType: '.yaml' + fileType: '.yaml,.json', + description: 'Upload your OpenAPI specification file (YAML or JSON)', + show: { + inputType: 'file' + } + }, + { + label: 'OpenAPI Link', + name: 'openApiLink', + type: 'string', + placeholder: 
'https://api.example.com/openapi.yaml or https://api.example.com/openapi.json', + description: 'Provide a link to your OpenAPI specification (YAML or JSON)', + show: { + inputType: 'link' + } + }, + { + label: 'Server', + name: 'selectedServer', + type: 'asyncOptions', + loadMethod: 'listServers', + description: 'Select which server to use for API calls', + refresh: true + }, + { + label: 'Available Endpoints', + name: 'selectedEndpoints', + type: 'asyncMultiOptions', + loadMethod: 'listEndpoints', + description: 'Select which endpoints to expose as tools', + refresh: true }, { label: 'Return Direct', @@ -75,48 +123,237 @@ class OpenAPIToolkit_Tools implements INode { async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { const toolReturnDirect = nodeData.inputs?.returnDirect as boolean - const yamlFileBase64 = nodeData.inputs?.yamlFile as string + const inputType = nodeData.inputs?.inputType as string + const openApiFile = nodeData.inputs?.openApiFile as string + const openApiLink = nodeData.inputs?.openApiLink as string + const selectedServer = nodeData.inputs?.selectedServer as string const customCode = nodeData.inputs?.customCode as string const _headers = nodeData.inputs?.headers as string const removeNulls = nodeData.inputs?.removeNulls as boolean const headers = typeof _headers === 'object' ? _headers : _headers ? 
JSON.parse(_headers) : {} - let data - if (yamlFileBase64.startsWith('FILE-STORAGE::')) { - const file = yamlFileBase64.replace('FILE-STORAGE::', '') - const chatflowid = options.chatflowid - const fileData = await getFileFromStorage(file, chatflowid) - const utf8String = fileData.toString('utf-8') + const specData = await this.loadOpenApiSpec( + { + inputType, + openApiFile, + openApiLink + }, + options + ) + if (!specData) throw new Error('Failed to load OpenAPI spec') - data = load(utf8String) + const _data: any = await $RefParser.dereference(specData) + + // Use selected server or fallback to first server + let baseUrl: string + if (selectedServer && selectedServer !== 'error') { + baseUrl = selectedServer } else { - const splitDataURI = yamlFileBase64.split(',') - splitDataURI.pop() - const bf = Buffer.from(splitDataURI.pop() || '', 'base64') - const utf8String = bf.toString('utf-8') - data = load(utf8String) - } - if (!data) { - throw new Error('Failed to load OpenAPI spec') + baseUrl = _data.servers?.[0]?.url } - const _data: any = await $RefParser.dereference(data) - - const baseUrl = _data.servers[0]?.url - if (!baseUrl) { - throw new Error('OpenAPI spec does not contain a server URL') - } + if (!baseUrl) throw new Error('OpenAPI spec does not contain a server URL') const appDataSource = options.appDataSource as DataSource const databaseEntities = options.databaseEntities as IDatabaseEntity - const variables = await getVars(appDataSource, databaseEntities, nodeData) - + const variables = await getVars(appDataSource, databaseEntities, nodeData, options) const flow = { chatflowId: options.chatflowid } - const tools = getTools(_data.paths, baseUrl, headers, variables, flow, toolReturnDirect, customCode, removeNulls) + let tools = getTools(_data.paths, baseUrl, headers, variables, flow, toolReturnDirect, customCode, removeNulls) + + // Filter by selected endpoints if provided + const _selected = nodeData.inputs?.selectedEndpoints + let selected: string[] = [] 
+ if (_selected) { + try { + selected = typeof _selected === 'string' ? JSON.parse(_selected) : _selected + } catch (e) { + selected = [] + } + } + if (selected.length) { + tools = tools.filter((t: any) => selected.includes(t.name)) + } + return tools } + + //@ts-ignore + loadMethods = { + listServers: async (nodeData: INodeData, options: ICommonObject) => { + try { + const inputType = nodeData.inputs?.inputType as string + const openApiFile = nodeData.inputs?.openApiFile as string + const openApiLink = nodeData.inputs?.openApiLink as string + const specData: any = await this.loadOpenApiSpec( + { + inputType, + openApiFile, + openApiLink + }, + options + ) + if (!specData) return [] + const _data: any = await $RefParser.dereference(specData) + const items: { label: string; name: string; description?: string }[] = [] + const servers = _data.servers || [] + + if (servers.length === 0) { + return [ + { + label: 'No Servers Found', + name: 'error', + description: 'No servers defined in the OpenAPI specification' + } + ] + } + + for (let i = 0; i < servers.length; i++) { + const server = servers[i] + const serverUrl = server.url || `Server ${i + 1}` + const serverDesc = server.description || serverUrl + items.push({ + label: serverUrl, + name: serverUrl, + description: serverDesc + }) + } + + return items + } catch (e) { + return [ + { + label: 'No Servers Found', + name: 'error', + description: 'No available servers, check the link/file and refresh' + } + ] + } + }, + listEndpoints: async (nodeData: INodeData, options: ICommonObject) => { + try { + const inputType = nodeData.inputs?.inputType as string + const openApiFile = nodeData.inputs?.openApiFile as string + const openApiLink = nodeData.inputs?.openApiLink as string + const specData: any = await this.loadOpenApiSpec( + { + inputType, + openApiFile, + openApiLink + }, + options + ) + if (!specData) return [] + const _data: any = await $RefParser.dereference(specData) + const items: { label: string; name: string; 
description?: string }[] = [] + const paths = _data.paths || {} + for (const path in paths) { + const methods = paths[path] + for (const method in methods) { + if (['get', 'post', 'put', 'delete', 'patch'].includes(method)) { + const spec = methods[method] + const opId = spec.operationId || `${method.toUpperCase()} ${path}` + const desc = spec.description || spec.summary || opId + items.push({ label: opId, name: opId, description: desc }) + } + } + } + items.sort((a, b) => a.label.localeCompare(b.label)) + return items + } catch (e) { + return [ + { + label: 'No Endpoints Found', + name: 'error', + description: 'No available endpoints, check the link/file and refresh' + } + ] + } + } + } + + private async loadOpenApiSpec( + args: { + inputType?: string + openApiFile?: string + openApiLink?: string + }, + options: ICommonObject + ): Promise { + const { inputType = 'file', openApiFile = '', openApiLink = '' } = args + try { + if (inputType === 'link' && openApiLink) { + const res = await fetch(openApiLink) + const text = await res.text() + + // Auto-detect format from URL extension or content + const isJsonUrl = openApiLink.toLowerCase().includes('.json') + const isYamlUrl = openApiLink.toLowerCase().includes('.yaml') || openApiLink.toLowerCase().includes('.yml') + + if (isJsonUrl) { + return JSON.parse(text) + } else if (isYamlUrl) { + return load(text) + } else { + // Auto-detect format from content + try { + return JSON.parse(text) + } catch (_) { + return load(text) + } + } + } + + if (inputType === 'file' && openApiFile) { + let utf8String: string + let fileName = '' + + if (openApiFile.startsWith('FILE-STORAGE::')) { + const file = openApiFile.replace('FILE-STORAGE::', '') + fileName = file + const orgId = options.orgId + const chatflowid = options.chatflowid + const fileData = await getFileFromStorage(file, orgId, chatflowid) + utf8String = fileData.toString('utf-8') + } else { + // Extract filename from data URI if possible + const splitDataURI = 
openApiFile.split(',') + const mimeType = splitDataURI[0] || '' + if (mimeType.includes('filename=')) { + const filenameMatch = mimeType.match(/filename=([^;]+)/) + if (filenameMatch) { + fileName = filenameMatch[1] + } + } + splitDataURI.pop() + const bf = Buffer.from(splitDataURI.pop() || '', 'base64') + utf8String = bf.toString('utf-8') + } + + // Auto-detect format from file extension or content + const isJsonFile = fileName.toLowerCase().endsWith('.json') + const isYamlFile = fileName.toLowerCase().endsWith('.yaml') || fileName.toLowerCase().endsWith('.yml') + + if (isJsonFile) { + return JSON.parse(utf8String) + } else if (isYamlFile) { + return load(utf8String) + } else { + // Auto-detect format from content + try { + return JSON.parse(utf8String) + } catch (_) { + return load(utf8String) + } + } + } + } catch (e) { + console.error('Error loading OpenAPI spec:', e) + return null + } + return null + } } const jsonSchemaToZodSchema = (schema: any, requiredList: string[], keyName: string): ZodSchema => { @@ -242,6 +479,9 @@ const getTools = ( const methods = paths[path] for (const method in methods) { // example of method: "get" + if (method !== 'get' && method !== 'post' && method !== 'put' && method !== 'delete' && method !== 'patch') { + continue + } const spec = methods[method] const toolName = spec.operationId const toolDesc = spec.description || spec.summary || toolName @@ -317,7 +557,7 @@ const getTools = ( dynamicStructuredTool.setVariables(variables) dynamicStructuredTool.setFlowObject(flow) dynamicStructuredTool.returnDirect = returnDirect - tools.push(dynamicStructuredTool) + if (toolName && toolDesc) tools.push(dynamicStructuredTool) } } return tools diff --git a/packages/components/nodes/tools/OpenAPIToolkit/core.ts b/packages/components/nodes/tools/OpenAPIToolkit/core.ts index f7701770e..ebfc8c14b 100644 --- a/packages/components/nodes/tools/OpenAPIToolkit/core.ts +++ b/packages/components/nodes/tools/OpenAPIToolkit/core.ts @@ -1,10 +1,9 @@ import 
{ z } from 'zod' import { RequestInit } from 'node-fetch' -import { NodeVM } from '@flowiseai/nodevm' import { RunnableConfig } from '@langchain/core/runnables' import { StructuredTool, ToolParams } from '@langchain/core/tools' import { CallbackManagerForToolRun, Callbacks, CallbackManager, parseCallbackConfigArg } from '@langchain/core/callbacks/manager' -import { availableDependencies, defaultAllowBuiltInDep, prepareSandboxVars } from '../../../src/utils' +import { executeJavaScriptCode, createCodeExecutionSandbox, parseWithTypeConversion } from '../../../src/utils' import { ICommonObject } from '../../../src/Interface' const removeNulls = (obj: Record) => { @@ -175,7 +174,7 @@ export class DynamicStructuredTool< } let parsed try { - parsed = await this.schema.parseAsync(arg) + parsed = await parseWithTypeConversion(this.schema, arg) } catch (e) { throw new ToolInputParsingException(`Received tool input did not match expected schema ${e}`, JSON.stringify(arg)) } @@ -217,32 +216,22 @@ export class DynamicStructuredTool< _?: CallbackManagerForToolRun, flowConfig?: { sessionId?: string; chatId?: string; input?: string; state?: ICommonObject } ): Promise { - let sandbox: any = { - util: undefined, - Symbol: undefined, - child_process: undefined, - fs: undefined, - process: undefined - } let processedArg = { ...arg } if (this.removeNulls && typeof processedArg === 'object' && processedArg !== null) { processedArg = removeNulls(processedArg) } + // Create additional sandbox variables for tool arguments + const additionalSandbox: ICommonObject = {} + if (typeof processedArg === 'object' && Object.keys(processedArg).length) { for (const item in processedArg) { - sandbox[`$${item}`] = processedArg[item] + additionalSandbox[`$${item}`] = processedArg[item] } } - sandbox['$vars'] = prepareSandboxVars(this.variables) - - // inject flow properties - if (this.flowObj) { - sandbox['$flow'] = { ...this.flowObj, ...flowConfig } - } - + // Prepare HTTP request options const 
callOptions: RequestInit = { method: this.method, headers: { @@ -253,31 +242,22 @@ export class DynamicStructuredTool< if (arg.RequestBody && this.method.toUpperCase() !== 'GET') { callOptions.body = JSON.stringify(arg.RequestBody) } - sandbox['$options'] = callOptions + additionalSandbox['$options'] = callOptions + // Generate complete URL const completeUrl = getUrl(this.baseUrl, arg) - sandbox['$url'] = completeUrl + additionalSandbox['$url'] = completeUrl - const builtinDeps = process.env.TOOL_FUNCTION_BUILTIN_DEP - ? defaultAllowBuiltInDep.concat(process.env.TOOL_FUNCTION_BUILTIN_DEP.split(',')) - : defaultAllowBuiltInDep - const externalDeps = process.env.TOOL_FUNCTION_EXTERNAL_DEP ? process.env.TOOL_FUNCTION_EXTERNAL_DEP.split(',') : [] - const deps = availableDependencies.concat(externalDeps) + // Prepare flow object for sandbox + const flow = this.flowObj ? { ...this.flowObj, ...flowConfig } : {} - const options = { - console: 'inherit', - sandbox, - require: { - external: { modules: deps }, - builtin: builtinDeps - }, - eval: false, - wasm: false, - timeout: 10000 - } as any + const sandbox = createCodeExecutionSandbox('', this.variables || [], flow, additionalSandbox) - const vm = new NodeVM(options) - const response = await vm.run(`module.exports = async function() {${this.customCode || defaultCode}}()`, __dirname) + let response = await executeJavaScriptCode(this.customCode || defaultCode, sandbox) + + if (typeof response === 'object') { + response = JSON.stringify(response) + } return response } diff --git a/packages/components/nodes/tools/ReadFile/ReadFile.ts b/packages/components/nodes/tools/ReadFile/ReadFile.ts deleted file mode 100644 index 6fa4f72ac..000000000 --- a/packages/components/nodes/tools/ReadFile/ReadFile.ts +++ /dev/null @@ -1,85 +0,0 @@ -import { z } from 'zod' -import { StructuredTool, ToolParams } from '@langchain/core/tools' -import { Serializable } from '@langchain/core/load/serializable' -import { NodeFileStore } from 
'langchain/stores/file/node' -import { INode, INodeData, INodeParams } from '../../../src/Interface' -import { getBaseClasses } from '../../../src/utils' - -abstract class BaseFileStore extends Serializable { - abstract readFile(path: string): Promise - abstract writeFile(path: string, contents: string): Promise -} - -class ReadFile_Tools implements INode { - label: string - name: string - version: number - description: string - type: string - icon: string - category: string - baseClasses: string[] - inputs: INodeParams[] - - constructor() { - this.label = 'Read File' - this.name = 'readFile' - this.version = 1.0 - this.type = 'ReadFile' - this.icon = 'readfile.svg' - this.category = 'Tools' - this.description = 'Read file from disk' - this.baseClasses = [this.type, 'Tool', ...getBaseClasses(ReadFileTool)] - this.inputs = [ - { - label: 'Base Path', - name: 'basePath', - placeholder: `C:\\Users\\User\\Desktop`, - type: 'string', - optional: true - } - ] - } - - async init(nodeData: INodeData): Promise { - const basePath = nodeData.inputs?.basePath as string - const store = basePath ? new NodeFileStore(basePath) : new NodeFileStore() - return new ReadFileTool({ store }) - } -} - -interface ReadFileParams extends ToolParams { - store: BaseFileStore -} - -/** - * Class for reading files from the disk. Extends the StructuredTool - * class. 
- */ -export class ReadFileTool extends StructuredTool { - static lc_name() { - return 'ReadFileTool' - } - - schema = z.object({ - file_path: z.string().describe('name of file') - }) as any - - name = 'read_file' - - description = 'Read file from disk' - - store: BaseFileStore - - constructor({ store }: ReadFileParams) { - super(...arguments) - - this.store = store - } - - async _call({ file_path }: z.infer) { - return await this.store.readFile(file_path) - } -} - -module.exports = { nodeClass: ReadFile_Tools } diff --git a/packages/components/nodes/tools/ReadFile/readfile.svg b/packages/components/nodes/tools/ReadFile/readfile.svg deleted file mode 100644 index c7cba0efa..000000000 --- a/packages/components/nodes/tools/ReadFile/readfile.svg +++ /dev/null @@ -1,4 +0,0 @@ - - - - diff --git a/packages/components/nodes/tools/RequestsDelete/RequestsDelete.ts b/packages/components/nodes/tools/RequestsDelete/RequestsDelete.ts new file mode 100644 index 000000000..22a5bb8eb --- /dev/null +++ b/packages/components/nodes/tools/RequestsDelete/RequestsDelete.ts @@ -0,0 +1,141 @@ +import { INode, INodeData, INodeParams } from '../../../src/Interface' +import { getBaseClasses, stripHTMLFromToolInput, parseJsonBody } from '../../../src/utils' +import { desc, RequestParameters, RequestsDeleteTool } from './core' + +const codeExample = `{ + "id": { + "type": "string", + "required": true, + "in": "path", + "description": "ID of the item to delete. /:id" + }, + "force": { + "type": "string", + "in": "query", + "description": "Force delete the item. 
?force=true" + } +}` + +class RequestsDelete_Tools implements INode { + label: string + name: string + version: number + description: string + type: string + icon: string + category: string + baseClasses: string[] + inputs: INodeParams[] + + constructor() { + this.label = 'Requests Delete' + this.name = 'requestsDelete' + this.version = 1.0 + this.type = 'RequestsDelete' + this.icon = 'del.png' + this.category = 'Tools' + this.description = 'Execute HTTP DELETE requests' + this.baseClasses = [this.type, ...getBaseClasses(RequestsDeleteTool), 'Tool'] + this.inputs = [ + { + label: 'URL', + name: 'requestsDeleteUrl', + type: 'string', + acceptVariable: true + }, + { + label: 'Name', + name: 'requestsDeleteName', + type: 'string', + default: 'requests_delete', + description: 'Name of the tool', + additionalParams: true, + optional: true + }, + { + label: 'Description', + name: 'requestsDeleteDescription', + type: 'string', + rows: 4, + default: desc, + description: 'Describe to LLM when it should use this tool', + additionalParams: true, + optional: true + }, + { + label: 'Headers', + name: 'requestsDeleteHeaders', + type: 'string', + rows: 4, + acceptVariable: true, + additionalParams: true, + optional: true, + placeholder: `{ + "Authorization": "Bearer " +}` + }, + { + label: 'Query Params Schema', + name: 'requestsDeleteQueryParamsSchema', + type: 'code', + description: 'Description of the available query params to enable LLM to figure out which query params to use', + placeholder: `{ + "id": { + "type": "string", + "required": true, + "in": "path", + "description": "ID of the item to delete. /:id" + }, + "force": { + "type": "string", + "in": "query", + "description": "Force delete the item. ?force=true" + } +}`, + optional: true, + hideCodeExecute: true, + additionalParams: true, + codeExample: codeExample + }, + { + label: 'Max Output Length', + name: 'requestsDeleteMaxOutputLength', + type: 'number', + description: 'Max length of the output. 
Remove this if you want to return the entire response', + default: '2000', + step: 1, + optional: true, + additionalParams: true + } + ] + } + + async init(nodeData: INodeData): Promise { + const headers = (nodeData.inputs?.headers as string) || (nodeData.inputs?.requestsDeleteHeaders as string) + const url = (nodeData.inputs?.url as string) || (nodeData.inputs?.requestsDeleteUrl as string) + const description = (nodeData.inputs?.description as string) || (nodeData.inputs?.requestsDeleteDescription as string) + const name = (nodeData.inputs?.name as string) || (nodeData.inputs?.requestsDeleteName as string) + const queryParamsSchema = + (nodeData.inputs?.queryParamsSchema as string) || (nodeData.inputs?.requestsDeleteQueryParamsSchema as string) + const maxOutputLength = nodeData.inputs?.requestsDeleteMaxOutputLength as string + + const obj: RequestParameters = {} + if (url) obj.url = stripHTMLFromToolInput(url) + if (description) obj.description = description + if (name) + obj.name = name + .toLowerCase() + .replace(/ /g, '_') + .replace(/[^a-z0-9_-]/g, '') + if (queryParamsSchema) obj.queryParamsSchema = queryParamsSchema + if (maxOutputLength) obj.maxOutputLength = parseInt(maxOutputLength, 10) + if (headers) { + const parsedHeaders = typeof headers === 'object' ? 
headers : parseJsonBody(stripHTMLFromToolInput(headers)) + obj.headers = parsedHeaders + } + + return new RequestsDeleteTool(obj) + } +} + +module.exports = { nodeClass: RequestsDelete_Tools } diff --git a/packages/components/nodes/tools/RequestsDelete/core.ts b/packages/components/nodes/tools/RequestsDelete/core.ts new file mode 100644 index 000000000..47e8b5866 --- /dev/null +++ b/packages/components/nodes/tools/RequestsDelete/core.ts @@ -0,0 +1,185 @@ +import { z } from 'zod' +import { DynamicStructuredTool } from '../OpenAPIToolkit/core' +import { secureFetch } from '../../../src/httpSecurity' +import { parseJsonBody } from '../../../src/utils' + +export const desc = `Use this when you need to execute a DELETE request to remove data from a website.` + +export interface Headers { + [key: string]: string +} + +export interface RequestParameters { + headers?: Headers + url?: string + name?: string + queryParamsSchema?: string + description?: string + maxOutputLength?: number +} + +// Base schema for DELETE request +const createRequestsDeleteSchema = (queryParamsSchema?: string) => { + // If queryParamsSchema is provided, parse it and add dynamic query params + if (queryParamsSchema) { + try { + const parsedSchema = parseJsonBody(queryParamsSchema) + const queryParamsObject: Record = {} + + Object.entries(parsedSchema).forEach(([key, config]: [string, any]) => { + let zodType: z.ZodTypeAny = z.string() + + // Handle different types + if (config.type === 'number') { + zodType = z.string().transform((val) => Number(val)) + } else if (config.type === 'boolean') { + zodType = z.string().transform((val) => val === 'true') + } + + // Add description + if (config.description) { + zodType = zodType.describe(config.description) + } + + // Make optional if not required + if (!config.required) { + zodType = zodType.optional() + } + + queryParamsObject[key] = zodType + }) + + if (Object.keys(queryParamsObject).length > 0) { + return z.object({ + queryParams: 
z.object(queryParamsObject).optional().describe('Query parameters for the request') + }) + } + } catch (error) { + console.warn('Failed to parse queryParamsSchema:', error) + } + } + + // Fallback to generic query params + return z.object({ + queryParams: z.record(z.string()).optional().describe('Optional query parameters to include in the request') + }) +} + +export class RequestsDeleteTool extends DynamicStructuredTool { + url = '' + maxOutputLength = Infinity + headers = {} + queryParamsSchema?: string + + constructor(args?: RequestParameters) { + const schema = createRequestsDeleteSchema(args?.queryParamsSchema) + + const toolInput = { + name: args?.name || 'requests_delete', + description: args?.description || desc, + schema: schema, + baseUrl: '', + method: 'DELETE', + headers: args?.headers || {} + } + super(toolInput) + this.url = args?.url ?? this.url + this.headers = args?.headers ?? this.headers + this.maxOutputLength = args?.maxOutputLength ?? this.maxOutputLength + this.queryParamsSchema = args?.queryParamsSchema + } + + /** @ignore */ + async _call(arg: any): Promise { + const params = { ...arg } + + const inputUrl = this.url + if (!inputUrl) { + throw new Error('URL is required for DELETE request') + } + + const requestHeaders = { + ...(params.headers || {}), + ...this.headers + } + + // Process URL and query parameters based on schema + let finalUrl = inputUrl + const queryParams: Record = {} + + if (this.queryParamsSchema && params.queryParams && Object.keys(params.queryParams).length > 0) { + try { + const parsedSchema = parseJsonBody(this.queryParamsSchema) + const pathParams: Array<{ key: string; value: string }> = [] + + Object.entries(params.queryParams).forEach(([key, value]) => { + const paramConfig = parsedSchema[key] + if (paramConfig && value !== undefined && value !== null) { + if (paramConfig.in === 'path') { + // Check if URL contains path parameter placeholder + const pathPattern = new RegExp(`:${key}\\b`, 'g') + if 
(finalUrl.includes(`:${key}`)) { + // Replace path parameters in URL (e.g., /:id -> /123) + finalUrl = finalUrl.replace(pathPattern, encodeURIComponent(String(value))) + } else { + // Collect path parameters to append to URL + pathParams.push({ key, value: String(value) }) + } + } else if (paramConfig.in === 'query') { + // Add to query parameters + queryParams[key] = String(value) + } + } + }) + + // Append path parameters to URL if any exist + if (pathParams.length > 0) { + let urlPath = finalUrl + // Remove trailing slash if present + if (urlPath.endsWith('/')) { + urlPath = urlPath.slice(0, -1) + } + // Append each path parameter + pathParams.forEach(({ value }) => { + urlPath += `/${encodeURIComponent(value)}` + }) + finalUrl = urlPath + } + + // Add query parameters to URL if any exist + if (Object.keys(queryParams).length > 0) { + const url = new URL(finalUrl) + Object.entries(queryParams).forEach(([key, value]) => { + url.searchParams.append(key, value) + }) + finalUrl = url.toString() + } + } catch (error) { + console.warn('Failed to process queryParamsSchema:', error) + } + } else if (params.queryParams && Object.keys(params.queryParams).length > 0) { + // Fallback: treat all parameters as query parameters if no schema is defined + const url = new URL(finalUrl) + Object.entries(params.queryParams).forEach(([key, value]) => { + url.searchParams.append(key, String(value)) + }) + finalUrl = url.toString() + } + + try { + const res = await secureFetch(finalUrl, { + method: 'DELETE', + headers: requestHeaders + }) + + if (!res.ok) { + throw new Error(`HTTP Error ${res.status}: ${res.statusText}`) + } + + const text = await res.text() + return text.slice(0, this.maxOutputLength) + } catch (error) { + throw new Error(`Failed to make DELETE request: ${error instanceof Error ? 
error.message : 'Unknown error'}`) + } + } +} diff --git a/packages/components/nodes/tools/RequestsDelete/del.png b/packages/components/nodes/tools/RequestsDelete/del.png new file mode 100644 index 000000000..dead64c46 Binary files /dev/null and b/packages/components/nodes/tools/RequestsDelete/del.png differ diff --git a/packages/components/nodes/tools/RequestsGet/RequestsGet.ts b/packages/components/nodes/tools/RequestsGet/RequestsGet.ts index 91cff5000..6a7eed986 100644 --- a/packages/components/nodes/tools/RequestsGet/RequestsGet.ts +++ b/packages/components/nodes/tools/RequestsGet/RequestsGet.ts @@ -1,7 +1,21 @@ import { INode, INodeData, INodeParams } from '../../../src/Interface' -import { getBaseClasses } from '../../../src/utils' +import { getBaseClasses, stripHTMLFromToolInput, parseJsonBody } from '../../../src/utils' import { desc, RequestParameters, RequestsGetTool } from './core' +const codeExample = `{ + "id": { + "type": "string", + "required": true, + "in": "path", + "description": "ID of the item to get. /:id" + }, + "limit": { + "type": "string", + "in": "query", + "description": "Limit the number of items to get. ?limit=10" + } +}` + class RequestsGet_Tools implements INode { label: string name: string @@ -16,52 +30,107 @@ class RequestsGet_Tools implements INode { constructor() { this.label = 'Requests Get' this.name = 'requestsGet' - this.version = 1.0 + this.version = 2.0 this.type = 'RequestsGet' - this.icon = 'requestsget.svg' + this.icon = 'get.png' this.category = 'Tools' this.description = 'Execute HTTP GET requests' - this.baseClasses = [this.type, ...getBaseClasses(RequestsGetTool)] + this.baseClasses = [this.type, ...getBaseClasses(RequestsGetTool), 'Tool'] this.inputs = [ { label: 'URL', - name: 'url', + name: 'requestsGetUrl', type: 'string', - description: - 'Agent will make call to this exact URL. 
If not specified, agent will try to figure out itself from AIPlugin if provided', + acceptVariable: true + }, + { + label: 'Name', + name: 'requestsGetName', + type: 'string', + default: 'requests_get', + description: 'Name of the tool', additionalParams: true, optional: true }, { label: 'Description', - name: 'description', + name: 'requestsGetDescription', type: 'string', rows: 4, default: desc, - description: 'Acts like a prompt to tell agent when it should use this tool', + description: 'Describe to LLM when it should use this tool', additionalParams: true, optional: true }, { label: 'Headers', - name: 'headers', - type: 'json', + name: 'requestsGetHeaders', + type: 'string', + rows: 4, + acceptVariable: true, additionalParams: true, - optional: true + optional: true, + placeholder: `{ + "Authorization": "Bearer " +}` + }, + { + label: 'Query Params Schema', + name: 'requestsGetQueryParamsSchema', + type: 'code', + description: 'Description of the available query params to enable LLM to figure out which query params to use', + placeholder: `{ + "id": { + "type": "string", + "required": true, + "in": "path", + "description": "ID of the item to get. /:id" + }, + "limit": { + "type": "string", + "in": "query", + "description": "Limit the number of items to get. ?limit=10" + } +}`, + optional: true, + hideCodeExecute: true, + additionalParams: true, + codeExample: codeExample + }, + { + label: 'Max Output Length', + name: 'requestsGetMaxOutputLength', + type: 'number', + description: 'Max length of the output. 
Remove this if you want to return the entire response', + default: '2000', + step: 1, + optional: true, + additionalParams: true } ] } async init(nodeData: INodeData): Promise { - const headers = nodeData.inputs?.headers as string - const url = nodeData.inputs?.url as string - const description = nodeData.inputs?.description as string + const headers = (nodeData.inputs?.headers as string) || (nodeData.inputs?.requestsGetHeaders as string) + const url = (nodeData.inputs?.url as string) || (nodeData.inputs?.requestsGetUrl as string) + const description = (nodeData.inputs?.description as string) || (nodeData.inputs?.requestsGetDescription as string) + const name = (nodeData.inputs?.name as string) || (nodeData.inputs?.requestsGetName as string) + const queryParamsSchema = + (nodeData.inputs?.queryParamsSchema as string) || (nodeData.inputs?.requestsGetQueryParamsSchema as string) + const maxOutputLength = nodeData.inputs?.requestsGetMaxOutputLength as string const obj: RequestParameters = {} - if (url) obj.url = url + if (url) obj.url = stripHTMLFromToolInput(url) if (description) obj.description = description + if (name) + obj.name = name + .toLowerCase() + .replace(/ /g, '_') + .replace(/[^a-z0-9_-]/g, '') + if (queryParamsSchema) obj.queryParamsSchema = queryParamsSchema + if (maxOutputLength) obj.maxOutputLength = parseInt(maxOutputLength, 10) if (headers) { - const parsedHeaders = typeof headers === 'object' ? headers : JSON.parse(headers) + const parsedHeaders = typeof headers === 'object' ? 
headers : parseJsonBody(stripHTMLFromToolInput(headers)) obj.headers = parsedHeaders } diff --git a/packages/components/nodes/tools/RequestsGet/core.ts b/packages/components/nodes/tools/RequestsGet/core.ts index ea97cdf22..ca2b07d46 100644 --- a/packages/components/nodes/tools/RequestsGet/core.ts +++ b/packages/components/nodes/tools/RequestsGet/core.ts @@ -1,8 +1,9 @@ -import fetch from 'node-fetch' -import { Tool } from '@langchain/core/tools' +import { z } from 'zod' +import { DynamicStructuredTool } from '../OpenAPIToolkit/core' +import { secureFetch } from '../../../src/httpSecurity' +import { parseJsonBody } from '../../../src/utils' -export const desc = `A portal to the internet. Use this when you need to get specific content from a website. -Input should be a url (i.e. https://www.google.com). The output will be the text response of the GET request.` +export const desc = `Use this when you need to execute a GET request to get data from a website.` export interface Headers { [key: string]: string @@ -11,36 +12,173 @@ export interface Headers { export interface RequestParameters { headers?: Headers url?: string + name?: string + queryParamsSchema?: string description?: string maxOutputLength?: number } -export class RequestsGetTool extends Tool { - name = 'requests_get' +// Base schema for GET request +const createRequestsGetSchema = (queryParamsSchema?: string) => { + // If queryParamsSchema is provided, parse it and add dynamic query params + if (queryParamsSchema) { + try { + const parsedSchema = parseJsonBody(queryParamsSchema) + const queryParamsObject: Record = {} + + Object.entries(parsedSchema).forEach(([key, config]: [string, any]) => { + let zodType: z.ZodTypeAny = z.string() + + // Handle different types + if (config.type === 'number') { + zodType = z.string().transform((val) => Number(val)) + } else if (config.type === 'boolean') { + zodType = z.string().transform((val) => val === 'true') + } + + // Add description + if (config.description) { + 
zodType = zodType.describe(config.description) + } + + // Make optional if not required + if (!config.required) { + zodType = zodType.optional() + } + + queryParamsObject[key] = zodType + }) + + if (Object.keys(queryParamsObject).length > 0) { + return z.object({ + queryParams: z.object(queryParamsObject).optional().describe('Query parameters for the request') + }) + } + } catch (error) { + console.warn('Failed to parse queryParamsSchema:', error) + } + } + + // Fallback to generic query params + return z.object({ + queryParams: z.record(z.string()).optional().describe('Optional query parameters to include in the request') + }) +} + +export class RequestsGetTool extends DynamicStructuredTool { url = '' - description = desc - maxOutputLength = 2000 + maxOutputLength = Infinity headers = {} + queryParamsSchema?: string constructor(args?: RequestParameters) { - super() + const schema = createRequestsGetSchema(args?.queryParamsSchema) + + const toolInput = { + name: args?.name || 'requests_get', + description: args?.description || desc, + schema: schema, + baseUrl: '', + method: 'GET', + headers: args?.headers || {} + } + super(toolInput) this.url = args?.url ?? this.url this.headers = args?.headers ?? this.headers - this.description = args?.description ?? this.description this.maxOutputLength = args?.maxOutputLength ?? this.maxOutputLength + this.queryParamsSchema = args?.queryParamsSchema } /** @ignore */ - async _call(input: string) { - const inputUrl = !this.url ? 
input : this.url + async _call(arg: any): Promise { + const params = { ...arg } - if (process.env.DEBUG === 'true') console.info(`Making GET API call to ${inputUrl}`) + const inputUrl = this.url + if (!inputUrl) { + throw new Error('URL is required for GET request') + } - const res = await fetch(inputUrl, { - headers: this.headers - }) + const requestHeaders = { + ...(params.headers || {}), + ...this.headers + } - const text = await res.text() - return text.slice(0, this.maxOutputLength) + // Process URL and query parameters based on schema + let finalUrl = inputUrl + const queryParams: Record = {} + + if (this.queryParamsSchema && params.queryParams && Object.keys(params.queryParams).length > 0) { + try { + const parsedSchema = parseJsonBody(this.queryParamsSchema) + const pathParams: Array<{ key: string; value: string }> = [] + + Object.entries(params.queryParams).forEach(([key, value]) => { + const paramConfig = parsedSchema[key] + if (paramConfig && value !== undefined && value !== null) { + if (paramConfig.in === 'path') { + // Check if URL contains path parameter placeholder + const pathPattern = new RegExp(`:${key}\\b`, 'g') + if (finalUrl.includes(`:${key}`)) { + // Replace path parameters in URL (e.g., /:id -> /123) + finalUrl = finalUrl.replace(pathPattern, encodeURIComponent(String(value))) + } else { + // Collect path parameters to append to URL + pathParams.push({ key, value: String(value) }) + } + } else if (paramConfig.in === 'query') { + // Add to query parameters + queryParams[key] = String(value) + } + } + }) + + // Append path parameters to URL if any exist + if (pathParams.length > 0) { + let urlPath = finalUrl + // Remove trailing slash if present + if (urlPath.endsWith('/')) { + urlPath = urlPath.slice(0, -1) + } + // Append each path parameter + pathParams.forEach(({ value }) => { + urlPath += `/${encodeURIComponent(value)}` + }) + finalUrl = urlPath + } + + // Add query parameters to URL if any exist + if (Object.keys(queryParams).length > 
0) { + const url = new URL(finalUrl) + Object.entries(queryParams).forEach(([key, value]) => { + url.searchParams.append(key, value) + }) + finalUrl = url.toString() + } + } catch (error) { + console.warn('Failed to process queryParamsSchema:', error) + } + } else if (params.queryParams && Object.keys(params.queryParams).length > 0) { + // Fallback: treat all parameters as query parameters if no schema is defined + const url = new URL(finalUrl) + Object.entries(params.queryParams).forEach(([key, value]) => { + url.searchParams.append(key, String(value)) + }) + finalUrl = url.toString() + } + + try { + const res = await secureFetch(finalUrl, { + headers: requestHeaders + }) + + if (!res.ok) { + throw new Error(`HTTP Error ${res.status}: ${res.statusText}`) + } + + const text = await res.text() + return text.slice(0, this.maxOutputLength) + } catch (error) { + throw new Error(`Failed to make GET request: ${error instanceof Error ? error.message : 'Unknown error'}`) + } } } diff --git a/packages/components/nodes/tools/RequestsGet/get.png b/packages/components/nodes/tools/RequestsGet/get.png new file mode 100644 index 000000000..ac859c47f Binary files /dev/null and b/packages/components/nodes/tools/RequestsGet/get.png differ diff --git a/packages/components/nodes/tools/RequestsGet/requestsget.svg b/packages/components/nodes/tools/RequestsGet/requestsget.svg deleted file mode 100644 index d92c5b51a..000000000 --- a/packages/components/nodes/tools/RequestsGet/requestsget.svg +++ /dev/null @@ -1,6 +0,0 @@ - - - - - - diff --git a/packages/components/nodes/tools/RequestsPost/RequestsPost.ts b/packages/components/nodes/tools/RequestsPost/RequestsPost.ts index 9ff3d1426..3341f05d4 100644 --- a/packages/components/nodes/tools/RequestsPost/RequestsPost.ts +++ b/packages/components/nodes/tools/RequestsPost/RequestsPost.ts @@ -1,7 +1,19 @@ import { INode, INodeData, INodeParams } from '../../../src/Interface' -import { getBaseClasses } from '../../../src/utils' +import { 
getBaseClasses, stripHTMLFromToolInput, parseJsonBody } from '../../../src/utils' import { RequestParameters, desc, RequestsPostTool } from './core' +const codeExample = `{ + "name": { + "type": "string", + "required": true, + "description": "Name of the item" + }, + "date": { + "type": "string", + "description": "Date of the item" + } +}` + class RequestsPost_Tools implements INode { label: string name: string @@ -16,66 +28,123 @@ class RequestsPost_Tools implements INode { constructor() { this.label = 'Requests Post' this.name = 'requestsPost' - this.version = 1.0 + this.version = 2.0 this.type = 'RequestsPost' - this.icon = 'requestspost.svg' + this.icon = 'post.png' this.category = 'Tools' this.description = 'Execute HTTP POST requests' - this.baseClasses = [this.type, ...getBaseClasses(RequestsPostTool)] + this.baseClasses = [this.type, ...getBaseClasses(RequestsPostTool), 'Tool'] this.inputs = [ { label: 'URL', - name: 'url', + name: 'requestsPostUrl', type: 'string', - description: - 'Agent will make call to this exact URL. If not specified, agent will try to figure out itself from AIPlugin if provided', - additionalParams: true, - optional: true + acceptVariable: true }, { - label: 'Body', - name: 'body', - type: 'json', - description: - 'JSON body for the POST request. 
If not specified, agent will try to figure out itself from AIPlugin if provided', + label: 'Name', + name: 'requestsPostName', + type: 'string', + default: 'requests_post', + description: 'Name of the tool', additionalParams: true, optional: true }, { label: 'Description', - name: 'description', + name: 'requestsPostDescription', type: 'string', rows: 4, default: desc, - description: 'Acts like a prompt to tell agent when it should use this tool', + description: 'Describe to LLM when it should use this tool', additionalParams: true, optional: true }, { label: 'Headers', - name: 'headers', - type: 'json', + name: 'requestsPostHeaders', + type: 'string', + rows: 4, + acceptVariable: true, additionalParams: true, - optional: true + optional: true, + placeholder: `{ + "Authorization": "Bearer " +}` + }, + { + label: 'Body', + name: 'requestPostBody', + type: 'string', + rows: 4, + description: 'JSON body for the POST request. This will override the body generated by the LLM', + additionalParams: true, + acceptVariable: true, + optional: true, + placeholder: `{ + "name": "John Doe", + "age": 30 +}` + }, + { + label: 'Body Schema', + name: 'requestsPostBodySchema', + type: 'code', + description: 'Description of the available body params to enable LLM to figure out which body params to use', + placeholder: `{ + "name": { + "type": "string", + "required": true, + "description": "Name of the item" + }, + "date": { + "type": "string", + "description": "Date of the item" + } +}`, + optional: true, + hideCodeExecute: true, + additionalParams: true, + codeExample: codeExample + }, + { + label: 'Max Output Length', + name: 'requestsPostMaxOutputLength', + type: 'number', + description: 'Max length of the output. 
Remove this if you want to return the entire response', + default: '2000', + step: 1, + optional: true, + additionalParams: true } ] } async init(nodeData: INodeData): Promise { - const headers = nodeData.inputs?.headers as string - const url = nodeData.inputs?.url as string - const description = nodeData.inputs?.description as string - const body = nodeData.inputs?.body as string + const headers = (nodeData.inputs?.headers as string) || (nodeData.inputs?.requestsPostHeaders as string) + const url = (nodeData.inputs?.url as string) || (nodeData.inputs?.requestsPostUrl as string) + const name = (nodeData.inputs?.name as string) || (nodeData.inputs?.requestsPostName as string) + const description = (nodeData.inputs?.description as string) || (nodeData.inputs?.requestsPostDescription as string) + const body = (nodeData.inputs?.body as string) || (nodeData.inputs?.requestPostBody as string) + const bodySchema = nodeData.inputs?.requestsPostBodySchema as string + const maxOutputLength = (nodeData.inputs?.maxOutputLength as string) || (nodeData.inputs?.requestsPostMaxOutputLength as string) const obj: RequestParameters = {} - if (url) obj.url = url + if (url) obj.url = stripHTMLFromToolInput(url) if (description) obj.description = description + if (name) + obj.name = name + .toLowerCase() + .replace(/ /g, '_') + .replace(/[^a-z0-9_-]/g, '') + if (bodySchema) obj.bodySchema = stripHTMLFromToolInput(bodySchema) + if (maxOutputLength) obj.maxOutputLength = parseInt(maxOutputLength, 10) if (headers) { - const parsedHeaders = typeof headers === 'object' ? headers : JSON.parse(headers) + const parsedHeaders = typeof headers === 'object' ? headers : parseJsonBody(stripHTMLFromToolInput(headers)) obj.headers = parsedHeaders } if (body) { - const parsedBody = typeof body === 'object' ? body : JSON.parse(body) + const parsedBody = typeof body === 'object' ? 
body : parseJsonBody(body) obj.body = parsedBody } diff --git a/packages/components/nodes/tools/RequestsPost/core.ts b/packages/components/nodes/tools/RequestsPost/core.ts index a380f1676..96eb7981d 100644 --- a/packages/components/nodes/tools/RequestsPost/core.ts +++ b/packages/components/nodes/tools/RequestsPost/core.ts @@ -1,12 +1,9 @@ -import { Tool } from '@langchain/core/tools' -import fetch from 'node-fetch' +import { z } from 'zod' +import { DynamicStructuredTool } from '../OpenAPIToolkit/core' +import { secureFetch } from '../../../src/httpSecurity' +import { parseJsonBody } from '../../../src/utils' -export const desc = `Use this when you want to POST to a website. -Input should be a json string with two keys: "url" and "data". -The value of "url" should be a string, and the value of "data" should be a dictionary of -key-value pairs you want to POST to the url as a JSON body. -Be careful to always use double quotes for strings in the json string -The output will be the text response of the POST request.` +export const desc = `Use this when you want to execute a POST request to create or update a resource.` export interface Headers { [key: string]: string @@ -21,52 +18,129 @@ export interface RequestParameters { body?: Body url?: string description?: string + name?: string + bodySchema?: string maxOutputLength?: number } -export class RequestsPostTool extends Tool { - name = 'requests_post' +// Base schema for POST request +const createRequestsPostSchema = (bodySchema?: string) => { + // If bodySchema is provided, parse it and add dynamic body params + if (bodySchema) { + try { + const parsedSchema = parseJsonBody(bodySchema) + const bodyParamsObject: Record = {} + + Object.entries(parsedSchema).forEach(([key, config]: [string, any]) => { + let zodType: z.ZodTypeAny = z.string() + + // Handle different types + if (config.type === 'number') { + zodType = z.number() + } else if (config.type === 'boolean') { + zodType = z.boolean() + } else if (config.type 
=== 'object') { + zodType = z.record(z.any()) + } else if (config.type === 'array') { + zodType = z.array(z.any()) + } + + // Add description + if (config.description) { + zodType = zodType.describe(config.description) + } + + // Make optional if not required + if (!config.required) { + zodType = zodType.optional() + } + + bodyParamsObject[key] = zodType + }) + + if (Object.keys(bodyParamsObject).length > 0) { + return z.object({ + body: z.object(bodyParamsObject).describe('Request body parameters') + }) + } + } catch (error) { + console.warn('Failed to parse bodySchema:', error) + } + } + + // Fallback to generic body + return z.object({ + body: z.record(z.any()).optional().describe('Optional body data to include in the request') + }) +} + +export class RequestsPostTool extends DynamicStructuredTool { url = '' - description = desc maxOutputLength = Infinity headers = {} body = {} + bodySchema?: string constructor(args?: RequestParameters) { - super() + const schema = createRequestsPostSchema(args?.bodySchema) + + const toolInput = { + name: args?.name || 'requests_post', + description: args?.description || desc, + schema: schema, + baseUrl: '', + method: 'POST', + headers: args?.headers || {} + } + super(toolInput) this.url = args?.url ?? this.url this.headers = args?.headers ?? this.headers this.body = args?.body ?? this.body - this.description = args?.description ?? this.description this.maxOutputLength = args?.maxOutputLength ?? 
this.maxOutputLength + this.bodySchema = args?.bodySchema } /** @ignore */ - async _call(input: string) { + async _call(arg: any): Promise { + const params = { ...arg } + try { - let inputUrl = '' - let inputBody = {} - if (Object.keys(this.body).length || this.url) { - if (this.url) inputUrl = this.url - if (Object.keys(this.body).length) inputBody = this.body - } else { - const { url, data } = JSON.parse(input) - inputUrl = url - inputBody = data + const inputUrl = this.url + if (!inputUrl) { + throw new Error('URL is required for POST request') } - if (process.env.DEBUG === 'true') console.info(`Making POST API call to ${inputUrl} with body ${JSON.stringify(inputBody)}`) + let inputBody = { + ...this.body + } - const res = await fetch(inputUrl, { + if (this.bodySchema && params.body && Object.keys(params.body).length > 0) { + inputBody = { + ...inputBody, + ...params.body + } + } + + const requestHeaders = { + 'Content-Type': 'application/json', + ...(params.headers || {}), + ...this.headers + } + + const res = await secureFetch(inputUrl, { method: 'POST', - headers: this.headers, + headers: requestHeaders, body: JSON.stringify(inputBody) }) + if (!res.ok) { + throw new Error(`HTTP Error ${res.status}: ${res.statusText}`) + } + const text = await res.text() return text.slice(0, this.maxOutputLength) } catch (error) { - return `${error}` + throw new Error(`Failed to make POST request: ${error instanceof Error ? 
error.message : 'Unknown error'}`) } } } diff --git a/packages/components/nodes/tools/RequestsPost/post.png b/packages/components/nodes/tools/RequestsPost/post.png new file mode 100644 index 000000000..da409d5be Binary files /dev/null and b/packages/components/nodes/tools/RequestsPost/post.png differ diff --git a/packages/components/nodes/tools/RequestsPost/requestspost.svg b/packages/components/nodes/tools/RequestsPost/requestspost.svg deleted file mode 100644 index 477b1baf3..000000000 --- a/packages/components/nodes/tools/RequestsPost/requestspost.svg +++ /dev/null @@ -1,7 +0,0 @@ - - - - - - - diff --git a/packages/components/nodes/tools/RequestsPut/RequestsPut.ts b/packages/components/nodes/tools/RequestsPut/RequestsPut.ts new file mode 100644 index 000000000..3e0bab4e3 --- /dev/null +++ b/packages/components/nodes/tools/RequestsPut/RequestsPut.ts @@ -0,0 +1,155 @@ +import { INode, INodeData, INodeParams } from '../../../src/Interface' +import { getBaseClasses, stripHTMLFromToolInput, parseJsonBody } from '../../../src/utils' +import { RequestParameters, desc, RequestsPutTool } from './core' + +const codeExample = `{ + "name": { + "type": "string", + "required": true, + "description": "Name of the item" + }, + "date": { + "type": "string", + "description": "Date of the item" + } +}` + +class RequestsPut_Tools implements INode { + label: string + name: string + version: number + description: string + type: string + icon: string + category: string + baseClasses: string[] + inputs: INodeParams[] + + constructor() { + this.label = 'Requests Put' + this.name = 'requestsPut' + this.version = 1.0 + this.type = 'RequestsPut' + this.icon = 'put.png' + this.category = 'Tools' + this.description = 'Execute HTTP PUT requests' + this.baseClasses = [this.type, ...getBaseClasses(RequestsPutTool), 'Tool'] + this.inputs = [ + { + label: 'URL', + name: 'requestsPutUrl', + type: 'string', + acceptVariable: true + }, + { + label: 'Name', + name: 'requestsPutName', + type: 
'string', + default: 'requests_put', + description: 'Name of the tool', + additionalParams: true, + optional: true + }, + { + label: 'Description', + name: 'requestsPutDescription', + type: 'string', + rows: 4, + default: desc, + description: 'Describe to LLM when it should use this tool', + additionalParams: true, + optional: true + }, + { + label: 'Headers', + name: 'requestsPutHeaders', + type: 'string', + rows: 4, + acceptVariable: true, + additionalParams: true, + optional: true, + placeholder: `{ + "Authorization": "Bearer " +}` + }, + { + label: 'Body', + name: 'requestPutBody', + type: 'string', + rows: 4, + description: 'JSON body for the PUT request. This will override the body generated by the LLM', + additionalParams: true, + acceptVariable: true, + optional: true, + placeholder: `{ + "name": "John Doe", + "age": 30 +}` + }, + { + label: 'Body Schema', + name: 'requestsPutBodySchema', + type: 'code', + description: 'Description of the available body params to enable LLM to figure out which body params to use', + placeholder: `{ + "name": { + "type": "string", + "required": true, + "description": "Name of the item" + }, + "date": { + "type": "string", + "description": "Date of the item" + } +}`, + optional: true, + hideCodeExecute: true, + additionalParams: true, + codeExample: codeExample + }, + { + label: 'Max Output Length', + name: 'requestsPutMaxOutputLength', + type: 'number', + description: 'Max length of the output. 
Remove this if you want to return the entire response', + default: '2000', + step: 1, + optional: true, + additionalParams: true + } + ] + } + + async init(nodeData: INodeData): Promise { + const headers = (nodeData.inputs?.headers as string) || (nodeData.inputs?.requestsPutHeaders as string) + const url = (nodeData.inputs?.url as string) || (nodeData.inputs?.requestsPutUrl as string) + const name = (nodeData.inputs?.name as string) || (nodeData.inputs?.requestsPutName as string) + const description = (nodeData.inputs?.description as string) || (nodeData.inputs?.requestsPutDescription as string) + const body = (nodeData.inputs?.body as string) || (nodeData.inputs?.requestPutBody as string) + const bodySchema = nodeData.inputs?.requestsPutBodySchema as string + const maxOutputLength = (nodeData.inputs?.maxOutputLength as string) || (nodeData.inputs?.requestsPutMaxOutputLength as string) + + const obj: RequestParameters = {} + if (url) obj.url = stripHTMLFromToolInput(url) + if (description) obj.description = description + if (name) + obj.name = name + .toLowerCase() + .replace(/ /g, '_') + .replace(/[^a-z0-9_-]/g, '') + if (bodySchema) obj.bodySchema = stripHTMLFromToolInput(bodySchema) + if (maxOutputLength) obj.maxOutputLength = parseInt(maxOutputLength, 10) + if (headers) { + const parsedHeaders = typeof headers === 'object' ? headers : parseJsonBody(stripHTMLFromToolInput(headers)) + obj.headers = parsedHeaders + } + if (body) { + const parsedBody = typeof body === 'object' ? 
body : parseJsonBody(body) + obj.body = parsedBody + } + + return new RequestsPutTool(obj) + } +} + +module.exports = { nodeClass: RequestsPut_Tools } diff --git a/packages/components/nodes/tools/RequestsPut/core.ts b/packages/components/nodes/tools/RequestsPut/core.ts new file mode 100644 index 000000000..2db2b8863 --- /dev/null +++ b/packages/components/nodes/tools/RequestsPut/core.ts @@ -0,0 +1,146 @@ +import { z } from 'zod' +import { DynamicStructuredTool } from '../OpenAPIToolkit/core' +import { secureFetch } from '../../../src/httpSecurity' +import { parseJsonBody } from '../../../src/utils' + +export const desc = `Use this when you want to execute a PUT request to update or replace a resource.` + +export interface Headers { + [key: string]: string +} + +export interface Body { + [key: string]: any +} + +export interface RequestParameters { + headers?: Headers + body?: Body + url?: string + description?: string + name?: string + bodySchema?: string + maxOutputLength?: number +} + +// Base schema for PUT request +const createRequestsPutSchema = (bodySchema?: string) => { + // If bodySchema is provided, parse it and add dynamic body params + if (bodySchema) { + try { + const parsedSchema = parseJsonBody(bodySchema) + const bodyParamsObject: Record = {} + + Object.entries(parsedSchema).forEach(([key, config]: [string, any]) => { + let zodType: z.ZodTypeAny = z.string() + + // Handle different types + if (config.type === 'number') { + zodType = z.number() + } else if (config.type === 'boolean') { + zodType = z.boolean() + } else if (config.type === 'object') { + zodType = z.record(z.any()) + } else if (config.type === 'array') { + zodType = z.array(z.any()) + } + + // Add description + if (config.description) { + zodType = zodType.describe(config.description) + } + + // Make optional if not required + if (!config.required) { + zodType = zodType.optional() + } + + bodyParamsObject[key] = zodType + }) + + if (Object.keys(bodyParamsObject).length > 0) { + return 
z.object({ + body: z.object(bodyParamsObject).describe('Request body parameters') + }) + } + } catch (error) { + console.warn('Failed to parse bodySchema:', error) + } + } + + // Fallback to generic body + return z.object({ + body: z.record(z.any()).optional().describe('Optional body data to include in the request') + }) +} + +export class RequestsPutTool extends DynamicStructuredTool { + url = '' + maxOutputLength = Infinity + headers = {} + body = {} + bodySchema?: string + + constructor(args?: RequestParameters) { + const schema = createRequestsPutSchema(args?.bodySchema) + + const toolInput = { + name: args?.name || 'requests_put', + description: args?.description || desc, + schema: schema, + baseUrl: '', + method: 'PUT', + headers: args?.headers || {} + } + super(toolInput) + this.url = args?.url ?? this.url + this.headers = args?.headers ?? this.headers + this.body = args?.body ?? this.body + this.maxOutputLength = args?.maxOutputLength ?? this.maxOutputLength + this.bodySchema = args?.bodySchema + } + + /** @ignore */ + async _call(arg: any): Promise { + const params = { ...arg } + + try { + const inputUrl = this.url + if (!inputUrl) { + throw new Error('URL is required for PUT request') + } + + let inputBody = { + ...this.body + } + + if (this.bodySchema && params.body && Object.keys(params.body).length > 0) { + inputBody = { + ...inputBody, + ...params.body + } + } + + const requestHeaders = { + 'Content-Type': 'application/json', + ...(params.headers || {}), + ...this.headers + } + + const res = await secureFetch(inputUrl, { + method: 'PUT', + headers: requestHeaders, + body: JSON.stringify(inputBody) + }) + + if (!res.ok) { + throw new Error(`HTTP Error ${res.status}: ${res.statusText}`) + } + + const text = await res.text() + return text.slice(0, this.maxOutputLength) + } catch (error) { + throw new Error(`Failed to make PUT request: ${error instanceof Error ? 
error.message : 'Unknown error'}`) + } + } +} diff --git a/packages/components/nodes/tools/RequestsPut/put.png b/packages/components/nodes/tools/RequestsPut/put.png new file mode 100644 index 000000000..f60c6f46b Binary files /dev/null and b/packages/components/nodes/tools/RequestsPut/put.png differ diff --git a/packages/components/nodes/tools/RetrieverTool/RetrieverTool.ts b/packages/components/nodes/tools/RetrieverTool/RetrieverTool.ts index 0010bce9c..f23701e6b 100644 --- a/packages/components/nodes/tools/RetrieverTool/RetrieverTool.ts +++ b/packages/components/nodes/tools/RetrieverTool/RetrieverTool.ts @@ -3,7 +3,7 @@ import { CallbackManager, CallbackManagerForToolRun, Callbacks, parseCallbackCon import { BaseDynamicToolInput, DynamicTool, StructuredTool, ToolInputParsingException } from '@langchain/core/tools' import { BaseRetriever } from '@langchain/core/retrievers' import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface' -import { getBaseClasses, resolveFlowObjValue } from '../../../src/utils' +import { getBaseClasses, resolveFlowObjValue, parseWithTypeConversion } from '../../../src/utils' import { SOURCE_DOCUMENTS_PREFIX } from '../../../src/agents' import { RunnableConfig } from '@langchain/core/runnables' import { VectorStoreRetriever } from '@langchain/core/vectorstores' @@ -58,7 +58,7 @@ class DynamicStructuredTool = z.ZodObj } let parsed try { - parsed = await this.schema.parseAsync(arg) + parsed = await parseWithTypeConversion(this.schema, arg) } catch (e) { throw new ToolInputParsingException(`Received tool input did not match expected schema`, JSON.stringify(arg)) } @@ -173,7 +173,8 @@ class Retriever_Tools implements INode { hint: { label: 'What can you filter?', value: howToUse - } + }, + acceptVariable: true } ] } diff --git a/packages/components/nodes/tools/StripeTool/stripe.png b/packages/components/nodes/tools/StripeTool/stripe.png index b73ff0db3..91867c796 100644 Binary files 
a/packages/components/nodes/tools/StripeTool/stripe.png and b/packages/components/nodes/tools/StripeTool/stripe.png differ diff --git a/packages/components/nodes/tools/WriteFile/WriteFile.ts b/packages/components/nodes/tools/WriteFile/WriteFile.ts deleted file mode 100644 index bcb372f86..000000000 --- a/packages/components/nodes/tools/WriteFile/WriteFile.ts +++ /dev/null @@ -1,87 +0,0 @@ -import { z } from 'zod' -import { StructuredTool, ToolParams } from '@langchain/core/tools' -import { Serializable } from '@langchain/core/load/serializable' -import { NodeFileStore } from 'langchain/stores/file/node' -import { INode, INodeData, INodeParams } from '../../../src/Interface' -import { getBaseClasses } from '../../../src/utils' - -abstract class BaseFileStore extends Serializable { - abstract readFile(path: string): Promise - abstract writeFile(path: string, contents: string): Promise -} - -class WriteFile_Tools implements INode { - label: string - name: string - version: number - description: string - type: string - icon: string - category: string - baseClasses: string[] - inputs: INodeParams[] - - constructor() { - this.label = 'Write File' - this.name = 'writeFile' - this.version = 1.0 - this.type = 'WriteFile' - this.icon = 'writefile.svg' - this.category = 'Tools' - this.description = 'Write file to disk' - this.baseClasses = [this.type, 'Tool', ...getBaseClasses(WriteFileTool)] - this.inputs = [ - { - label: 'Base Path', - name: 'basePath', - placeholder: `C:\\Users\\User\\Desktop`, - type: 'string', - optional: true - } - ] - } - - async init(nodeData: INodeData): Promise { - const basePath = nodeData.inputs?.basePath as string - const store = basePath ? new NodeFileStore(basePath) : new NodeFileStore() - return new WriteFileTool({ store }) - } -} - -interface WriteFileParams extends ToolParams { - store: BaseFileStore -} - -/** - * Class for writing data to files on the disk. Extends the StructuredTool - * class. 
- */ -export class WriteFileTool extends StructuredTool { - static lc_name() { - return 'WriteFileTool' - } - - schema = z.object({ - file_path: z.string().describe('name of file'), - text: z.string().describe('text to write to file') - }) as any - - name = 'write_file' - - description = 'Write file from disk' - - store: BaseFileStore - - constructor({ store, ...rest }: WriteFileParams) { - super(rest) - - this.store = store - } - - async _call({ file_path, text }: z.infer) { - await this.store.writeFile(file_path, text) - return 'File written to successfully.' - } -} - -module.exports = { nodeClass: WriteFile_Tools } diff --git a/packages/components/nodes/tools/WriteFile/writefile.svg b/packages/components/nodes/tools/WriteFile/writefile.svg deleted file mode 100644 index 0df04ea44..000000000 --- a/packages/components/nodes/tools/WriteFile/writefile.svg +++ /dev/null @@ -1,4 +0,0 @@ - - - - diff --git a/packages/components/nodes/utilities/CustomFunction/CustomFunction.ts b/packages/components/nodes/utilities/CustomFunction/CustomFunction.ts index 079f186ba..d4cfd5d32 100644 --- a/packages/components/nodes/utilities/CustomFunction/CustomFunction.ts +++ b/packages/components/nodes/utilities/CustomFunction/CustomFunction.ts @@ -1,8 +1,7 @@ import { flatten } from 'lodash' import { type StructuredTool } from '@langchain/core/tools' -import { NodeVM } from '@flowiseai/nodevm' import { DataSource } from 'typeorm' -import { availableDependencies, defaultAllowBuiltInDep, getVars, handleEscapeCharacters, prepareSandboxVars } from '../../../src/utils' +import { getVars, handleEscapeCharacters, executeJavaScriptCode, createCodeExecutionSandbox } from '../../../src/utils' import { ICommonObject, IDatabaseEntity, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface' class CustomFunction_Utilities implements INode { @@ -83,7 +82,7 @@ class CustomFunction_Utilities implements INode { const databaseEntities = options.databaseEntities as IDatabaseEntity 
const tools = Object.fromEntries((flatten(nodeData.inputs?.tools) as StructuredTool[])?.map((tool) => [tool.name, tool]) ?? []) - const variables = await getVars(appDataSource, databaseEntities, nodeData) + const variables = await getVars(appDataSource, databaseEntities, nodeData, options) const flow = { chatflowId: options.chatflowid, sessionId: options.sessionId, @@ -118,45 +117,22 @@ class CustomFunction_Utilities implements INode { } } - let sandbox: any = { - $input: input, - util: undefined, - Symbol: undefined, - child_process: undefined, - fs: undefined, - process: undefined + // Create additional sandbox variables + const additionalSandbox: ICommonObject = { + $tools: tools } - sandbox['$vars'] = prepareSandboxVars(variables) - sandbox['$flow'] = flow - sandbox['$tools'] = tools + // Add input variables to sandbox if (Object.keys(inputVars).length) { for (const item in inputVars) { - sandbox[`$${item}`] = inputVars[item] + additionalSandbox[`$${item}`] = inputVars[item] } } - const builtinDeps = process.env.TOOL_FUNCTION_BUILTIN_DEP - ? defaultAllowBuiltInDep.concat(process.env.TOOL_FUNCTION_BUILTIN_DEP.split(',')) - : defaultAllowBuiltInDep - const externalDeps = process.env.TOOL_FUNCTION_EXTERNAL_DEP ? 
process.env.TOOL_FUNCTION_EXTERNAL_DEP.split(',') : [] - const deps = availableDependencies.concat(externalDeps) + const sandbox = createCodeExecutionSandbox(input, variables, flow, additionalSandbox) - const nodeVMOptions = { - console: 'inherit', - sandbox, - require: { - external: { modules: deps }, - builtin: builtinDeps - }, - eval: false, - wasm: false, - timeout: 10000 - } as any - - const vm = new NodeVM(nodeVMOptions) try { - const response = await vm.run(`module.exports = async function() {${javascriptFunction}}()`, __dirname) + const response = await executeJavaScriptCode(javascriptFunction, sandbox) if (typeof response === 'string' && !isEndingNode) { return handleEscapeCharacters(response, false) diff --git a/packages/components/nodes/utilities/IfElseFunction/IfElseFunction.ts b/packages/components/nodes/utilities/IfElseFunction/IfElseFunction.ts index 08b811636..572762b32 100644 --- a/packages/components/nodes/utilities/IfElseFunction/IfElseFunction.ts +++ b/packages/components/nodes/utilities/IfElseFunction/IfElseFunction.ts @@ -1,6 +1,5 @@ -import { NodeVM } from '@flowiseai/nodevm' import { DataSource } from 'typeorm' -import { availableDependencies, defaultAllowBuiltInDep, getVars, handleEscapeCharacters, prepareSandboxVars } from '../../../src/utils' +import { getVars, handleEscapeCharacters, executeJavaScriptCode, createCodeExecutionSandbox } from '../../../src/utils' import { ICommonObject, IDatabaseEntity, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface' class IfElseFunction_Utilities implements INode { @@ -85,7 +84,7 @@ class IfElseFunction_Utilities implements INode { const appDataSource = options.appDataSource as DataSource const databaseEntities = options.databaseEntities as IDatabaseEntity - const variables = await getVars(appDataSource, databaseEntities, nodeData) + const variables = await getVars(appDataSource, databaseEntities, nodeData, options) const flow = { chatflowId: options.chatflowid, sessionId: 
options.sessionId, @@ -119,48 +118,26 @@ class IfElseFunction_Utilities implements INode { } } - let sandbox: any = { - $input: input, - util: undefined, - Symbol: undefined, - child_process: undefined, - fs: undefined, - process: undefined - } - sandbox['$vars'] = prepareSandboxVars(variables) - sandbox['$flow'] = flow + // Create additional sandbox variables + const additionalSandbox: ICommonObject = {} + // Add input variables to sandbox if (Object.keys(inputVars).length) { for (const item in inputVars) { - sandbox[`$${item}`] = inputVars[item] + additionalSandbox[`$${item}`] = inputVars[item] } } - const builtinDeps = process.env.TOOL_FUNCTION_BUILTIN_DEP - ? defaultAllowBuiltInDep.concat(process.env.TOOL_FUNCTION_BUILTIN_DEP.split(',')) - : defaultAllowBuiltInDep - const externalDeps = process.env.TOOL_FUNCTION_EXTERNAL_DEP ? process.env.TOOL_FUNCTION_EXTERNAL_DEP.split(',') : [] - const deps = availableDependencies.concat(externalDeps) + const sandbox = createCodeExecutionSandbox(input, variables, flow, additionalSandbox) - const nodeVMOptions = { - console: 'inherit', - sandbox, - require: { - external: { modules: deps }, - builtin: builtinDeps - }, - eval: false, - wasm: false, - timeout: 10000 - } as any - - const vm = new NodeVM(nodeVMOptions) try { - const responseTrue = await vm.run(`module.exports = async function() {${ifFunction}}()`, __dirname) + const responseTrue = await executeJavaScriptCode(ifFunction, sandbox) + if (responseTrue) return { output: typeof responseTrue === 'string' ? handleEscapeCharacters(responseTrue, false) : responseTrue, type: true } - const responseFalse = await vm.run(`module.exports = async function() {${elseFunction}}()`, __dirname) + const responseFalse = await executeJavaScriptCode(elseFunction, sandbox) + return { output: typeof responseFalse === 'string' ? 
handleEscapeCharacters(responseFalse, false) : responseFalse, type: false } } catch (e) { throw new Error(e) diff --git a/packages/components/nodes/vectorstores/Chroma/Chroma.ts b/packages/components/nodes/vectorstores/Chroma/Chroma.ts index 62f4b8a64..dabd747ca 100644 --- a/packages/components/nodes/vectorstores/Chroma/Chroma.ts +++ b/packages/components/nodes/vectorstores/Chroma/Chroma.ts @@ -3,7 +3,7 @@ import { Chroma } from '@langchain/community/vectorstores/chroma' import { Embeddings } from '@langchain/core/embeddings' import { Document } from '@langchain/core/documents' import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams, IndexingResult } from '../../../src/Interface' -import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils' +import { getBaseClasses, getCredentialData, getCredentialParam, parseJsonBody } from '../../../src/utils' import { ChromaExtended } from './core' import { index } from '../../../src/indexing' @@ -74,7 +74,8 @@ class Chroma_VectorStores implements INode { name: 'chromaMetadataFilter', type: 'json', optional: true, - additionalParams: true + additionalParams: true, + acceptVariable: true }, { label: 'Top K', @@ -213,7 +214,6 @@ class Chroma_VectorStores implements INode { const chromaApiKey = getCredentialParam('chromaApiKey', credentialData, nodeData) const chromaTenant = getCredentialParam('chromaTenant', credentialData, nodeData) const chromaDatabase = getCredentialParam('chromaDatabase', credentialData, nodeData) - const chromaMetadataFilter = nodeData.inputs?.chromaMetadataFilter const obj: { @@ -229,7 +229,7 @@ class Chroma_VectorStores implements INode { if (chromaTenant) obj.chromaTenant = chromaTenant if (chromaDatabase) obj.chromaDatabase = chromaDatabase if (chromaMetadataFilter) { - const metadatafilter = typeof chromaMetadataFilter === 'object' ? chromaMetadataFilter : JSON.parse(chromaMetadataFilter) + const metadatafilter = typeof chromaMetadataFilter === 'object' ? 
chromaMetadataFilter : parseJsonBody(chromaMetadataFilter) obj.filter = metadatafilter } diff --git a/packages/components/nodes/vectorstores/Chroma/core.ts b/packages/components/nodes/vectorstores/Chroma/core.ts index bf6f48dab..293209dac 100644 --- a/packages/components/nodes/vectorstores/Chroma/core.ts +++ b/packages/components/nodes/vectorstores/Chroma/core.ts @@ -32,7 +32,7 @@ export class ChromaExtended extends Chroma { if (this.chromaApiKey) { obj.fetchOptions = { headers: { - Authorization: `Bearer ${this.chromaApiKey}` + 'x-chroma-token': this.chromaApiKey } } } diff --git a/packages/components/nodes/vectorstores/Couchbase/Couchbase.ts b/packages/components/nodes/vectorstores/Couchbase/Couchbase.ts index 83c45271e..4c6eee44c 100644 --- a/packages/components/nodes/vectorstores/Couchbase/Couchbase.ts +++ b/packages/components/nodes/vectorstores/Couchbase/Couchbase.ts @@ -4,7 +4,7 @@ import { Document } from '@langchain/core/documents' import { CouchbaseVectorStore, CouchbaseVectorStoreArgs } from '@langchain/community/vectorstores/couchbase' import { Cluster } from 'couchbase' import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams, IndexingResult } from '../../../src/Interface' -import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils' +import { getBaseClasses, getCredentialData, getCredentialParam, parseJsonBody } from '../../../src/utils' import { resolveVectorStoreOrRetriever } from '../VectorStoreUtils' class Couchbase_VectorStores implements INode { @@ -96,7 +96,8 @@ class Couchbase_VectorStores implements INode { name: 'couchbaseMetadataFilter', type: 'json', optional: true, - additionalParams: true + additionalParams: true, + acceptVariable: true }, { label: 'Top K', @@ -214,7 +215,8 @@ class Couchbase_VectorStores implements INode { if (!embeddingKey || embeddingKey === '') couchbaseConfig.embeddingKey = 'embedding' if (couchbaseMetadataFilter) { - metadatafilter = typeof couchbaseMetadataFilter === 
'object' ? couchbaseMetadataFilter : JSON.parse(couchbaseMetadataFilter) + metadatafilter = + typeof couchbaseMetadataFilter === 'object' ? couchbaseMetadataFilter : parseJsonBody(couchbaseMetadataFilter) } const vectorStore = await CouchbaseVectorStore.initialize(embeddings, couchbaseConfig) diff --git a/packages/components/nodes/vectorstores/DocumentStoreVS/DocStoreVector.ts b/packages/components/nodes/vectorstores/DocumentStoreVS/DocStoreVector.ts index ba2660e0b..0f228d1fb 100644 --- a/packages/components/nodes/vectorstores/DocumentStoreVS/DocStoreVector.ts +++ b/packages/components/nodes/vectorstores/DocumentStoreVS/DocStoreVector.ts @@ -56,7 +56,8 @@ class DocStore_VectorStores implements INode { return returnData } - const stores = await appDataSource.getRepository(databaseEntities['DocumentStore']).find() + const searchOptions = options.searchOptions || {} + const stores = await appDataSource.getRepository(databaseEntities['DocumentStore']).findBy(searchOptions) for (const store of stores) { if (store.status === 'UPSERTED') { const obj = { diff --git a/packages/components/nodes/vectorstores/Kendra/Kendra.ts b/packages/components/nodes/vectorstores/Kendra/Kendra.ts new file mode 100644 index 000000000..f3eb94368 --- /dev/null +++ b/packages/components/nodes/vectorstores/Kendra/Kendra.ts @@ -0,0 +1,294 @@ +import { flatten } from 'lodash' +import { AmazonKendraRetriever } from '@langchain/aws' +import { KendraClient, BatchPutDocumentCommand, BatchDeleteDocumentCommand } from '@aws-sdk/client-kendra' +import { Document } from '@langchain/core/documents' +import { ICommonObject, INode, INodeData, INodeOptionsValue, INodeOutputsValue, INodeParams, IndexingResult } from '../../../src/Interface' +import { FLOWISE_CHATID, getCredentialData, getCredentialParam, parseJsonBody } from '../../../src/utils' +import { howToUseFileUpload } from '../VectorStoreUtils' +import { MODEL_TYPE, getRegions } from '../../../src/modelLoader' + +class Kendra_VectorStores implements 
INode { + label: string + name: string + version: number + description: string + type: string + icon: string + category: string + badge: string + baseClasses: string[] + inputs: INodeParams[] + credential: INodeParams + outputs: INodeOutputsValue[] + + constructor() { + this.label = 'AWS Kendra' + this.name = 'kendra' + this.version = 1.0 + this.type = 'Kendra' + this.icon = 'kendra.svg' + this.category = 'Vector Stores' + this.description = `Use AWS Kendra's intelligent search service for document retrieval and semantic search` + this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] + this.credential = { + label: 'AWS Credential', + name: 'credential', + type: 'credential', + credentialNames: ['awsApi'], + optional: true + } + this.inputs = [ + { + label: 'Document', + name: 'document', + type: 'Document', + list: true, + optional: true + }, + { + label: 'Region', + name: 'region', + type: 'asyncOptions', + loadMethod: 'listRegions', + default: 'us-east-1' + }, + { + label: 'Kendra Index ID', + name: 'indexId', + type: 'string', + placeholder: 'xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx', + description: 'The ID of your AWS Kendra index' + }, + { + label: 'File Upload', + name: 'fileUpload', + description: 'Allow file upload on the chat', + hint: { + label: 'How to use', + value: howToUseFileUpload + }, + type: 'boolean', + additionalParams: true, + optional: true + }, + { + label: 'Top K', + name: 'topK', + description: 'Number of top results to fetch. 
Default to 10', + placeholder: '10', + type: 'number', + additionalParams: true, + optional: true + }, + { + label: 'Attribute Filter', + name: 'attributeFilter', + description: 'Optional filter to apply when retrieving documents', + type: 'json', + optional: true, + additionalParams: true, + acceptVariable: true + } + ] + // Note: Kendra doesn't support MMR search, but keeping the structure consistent + this.outputs = [ + { + label: 'Kendra Retriever', + name: 'retriever', + baseClasses: this.baseClasses + }, + { + label: 'Kendra Vector Store', + name: 'vectorStore', + baseClasses: [this.type, 'BaseRetriever'] + } + ] + } + + loadMethods = { + async listRegions(): Promise { + return await getRegions(MODEL_TYPE.CHAT, 'awsChatBedrock') + } + } + + //@ts-ignore + vectorStoreMethods = { + async upsert(nodeData: INodeData, options: ICommonObject): Promise> { + const indexId = nodeData.inputs?.indexId as string + const region = nodeData.inputs?.region as string + const docs = nodeData.inputs?.document as Document[] + const isFileUploadEnabled = nodeData.inputs?.fileUpload as boolean + + const credentialData = await getCredentialData(nodeData.credential ?? '', options) + let clientConfig: any = { region } + + if (credentialData && Object.keys(credentialData).length !== 0) { + const accessKeyId = getCredentialParam('awsKey', credentialData, nodeData) + const secretAccessKey = getCredentialParam('awsSecret', credentialData, nodeData) + const sessionToken = getCredentialParam('awsSession', credentialData, nodeData) + + if (accessKeyId && secretAccessKey) { + clientConfig.credentials = { + accessKeyId, + secretAccessKey, + ...(sessionToken && { sessionToken }) + } + } + } + + const client = new KendraClient(clientConfig) + + const flattenDocs = docs && docs.length ? 
flatten(docs) : [] + const finalDocs = [] + const kendraDocuments = [] + + for (let i = 0; i < flattenDocs.length; i += 1) { + if (flattenDocs[i] && flattenDocs[i].pageContent) { + if (isFileUploadEnabled && options.chatId) { + flattenDocs[i].metadata = { ...flattenDocs[i].metadata, [FLOWISE_CHATID]: options.chatId } + } + finalDocs.push(new Document(flattenDocs[i])) + + // Prepare document for Kendra + const docId = `doc_${Date.now()}_${i}` + const docTitle = flattenDocs[i].metadata?.title || flattenDocs[i].metadata?.source || `Document ${i + 1}` + + kendraDocuments.push({ + Id: docId, + Title: docTitle, + Blob: new Uint8Array(Buffer.from(flattenDocs[i].pageContent, 'utf-8')), + ContentType: 'PLAIN_TEXT' as any + }) + } + } + + try { + if (kendraDocuments.length > 0) { + // Kendra has a limit of 10 documents per batch + const batchSize = 10 + for (let i = 0; i < kendraDocuments.length; i += batchSize) { + const batch = kendraDocuments.slice(i, i + batchSize) + const command = new BatchPutDocumentCommand({ + IndexId: indexId, + Documents: batch + }) + + const response = await client.send(command) + + if (response.FailedDocuments && response.FailedDocuments.length > 0) { + console.error('Failed documents:', response.FailedDocuments) + throw new Error(`Failed to index some documents: ${JSON.stringify(response.FailedDocuments)}`) + } + } + } + + return { numAdded: finalDocs.length, addedDocs: finalDocs } + } catch (error) { + throw new Error(`Failed to index documents to Kendra: ${error}`) + } + }, + + async delete(nodeData: INodeData, ids: string[], options: ICommonObject): Promise { + const indexId = nodeData.inputs?.indexId as string + const region = nodeData.inputs?.region as string + + const credentialData = await getCredentialData(nodeData.credential ?? 
'', options) + let clientConfig: any = { region } + + if (credentialData && Object.keys(credentialData).length !== 0) { + const accessKeyId = getCredentialParam('awsKey', credentialData, nodeData) + const secretAccessKey = getCredentialParam('awsSecret', credentialData, nodeData) + const sessionToken = getCredentialParam('awsSession', credentialData, nodeData) + + if (accessKeyId && secretAccessKey) { + clientConfig.credentials = { + accessKeyId, + secretAccessKey, + ...(sessionToken && { sessionToken }) + } + } + } + + const client = new KendraClient(clientConfig) + + try { + // Kendra has a limit of 10 documents per batch delete + const batchSize = 10 + for (let i = 0; i < ids.length; i += batchSize) { + const batch = ids.slice(i, i + batchSize) + const command = new BatchDeleteDocumentCommand({ + IndexId: indexId, + DocumentIdList: batch + }) + await client.send(command) + } + } catch (error) { + throw new Error(`Failed to delete documents from Kendra: ${error}`) + } + } + } + + async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { + const indexId = nodeData.inputs?.indexId as string + const region = nodeData.inputs?.region as string + const topK = nodeData.inputs?.topK as string + const attributeFilter = nodeData.inputs?.attributeFilter + const isFileUploadEnabled = nodeData.inputs?.fileUpload as boolean + + const credentialData = await getCredentialData(nodeData.credential ?? '', options) + let clientOptions: any = {} + + if (credentialData && Object.keys(credentialData).length !== 0) { + clientOptions.credentials = { + accessKeyId: getCredentialParam('awsKey', credentialData, nodeData), + secretAccessKey: getCredentialParam('awsSecret', credentialData, nodeData), + sessionToken: getCredentialParam('awsSession', credentialData, nodeData) + } + } + + let filter = undefined + if (attributeFilter) { + filter = typeof attributeFilter === 'object' ? 
attributeFilter : parseJsonBody(attributeFilter) + } + + // Add chat-specific filtering if file upload is enabled + if (isFileUploadEnabled && options.chatId) { + if (!filter) { + filter = {} + } + filter.OrAllFilters = [ + ...(filter.OrAllFilters || []), + { + EqualsTo: { + Key: FLOWISE_CHATID, + Value: { + StringValue: options.chatId + } + } + } + ] + } + + const retriever = new AmazonKendraRetriever({ + topK: topK ? parseInt(topK) : 10, + indexId, + region, + attributeFilter: filter, + clientOptions + }) + + const output = nodeData.outputs?.output as string + + if (output === 'retriever') { + return retriever + } else if (output === 'vectorStore') { + // Kendra doesn't have a traditional vector store interface, + // but we can return the retriever with additional properties + ;(retriever as any).k = topK ? parseInt(topK) : 10 + ;(retriever as any).filter = filter + return retriever + } + } +} + +module.exports = { nodeClass: Kendra_VectorStores } diff --git a/packages/components/nodes/vectorstores/Kendra/kendra.svg b/packages/components/nodes/vectorstores/Kendra/kendra.svg new file mode 100644 index 000000000..89f101bd5 --- /dev/null +++ b/packages/components/nodes/vectorstores/Kendra/kendra.svg @@ -0,0 +1,31 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Kendra + + \ No newline at end of file diff --git a/packages/components/nodes/vectorstores/Meilisearch/Meilisearch.ts b/packages/components/nodes/vectorstores/Meilisearch/Meilisearch.ts index eed428cdb..b40f4f531 100644 --- a/packages/components/nodes/vectorstores/Meilisearch/Meilisearch.ts +++ b/packages/components/nodes/vectorstores/Meilisearch/Meilisearch.ts @@ -79,7 +79,7 @@ class MeilisearchRetriever_node implements INode { label: 'Semantic Ratio', name: 'semanticRatio', type: 'number', - description: 'percentage of sematic reasoning in meilisearch hybrid search, default is 0.75', + description: 'percentage of semantic reasoning in meilisearch hybrid search, default is 0.75', 
additionalParams: true, optional: true }, @@ -89,7 +89,8 @@ class MeilisearchRetriever_node implements INode { type: 'string', description: 'search filter to apply on searchable attributes', additionalParams: true, - optional: true + optional: true, + acceptVariable: true } ] this.outputs = [ @@ -161,7 +162,7 @@ class MeilisearchRetriever_node implements INode { } } catch (error) { console.error(error) - console.warn('Error occured when deleting your index, if it did not exist, we will create one for you... ') + console.warn('Error occurred when deleting your index, if it did not exist, we will create one for you... ') } } diff --git a/packages/components/nodes/vectorstores/Milvus/Milvus.ts b/packages/components/nodes/vectorstores/Milvus/Milvus.ts index e10c2cfc5..527d57bf5 100644 --- a/packages/components/nodes/vectorstores/Milvus/Milvus.ts +++ b/packages/components/nodes/vectorstores/Milvus/Milvus.ts @@ -100,7 +100,8 @@ class Milvus_VectorStores implements INode { description: 'Filter data with a simple string query. 
Refer Milvus docs for more details.', placeholder: 'doc=="a"', - additionalParams: true + additionalParams: true, + acceptVariable: true }, { label: 'Top K', @@ -395,7 +396,18 @@ const similaritySearchVectorWithScore = async (query: number[], k: number, vecto } } }) - results.push([new Document(fields), result.score]) + let normalizedScore = result.score + switch (vectorStore.indexCreateParams.metric_type) { + case MetricType.L2: + normalizedScore = 1 / (1 + result.score) + break + case MetricType.IP: + case MetricType.COSINE: + normalizedScore = (result.score + 1) / 2 + break + } + + results.push([new Document(fields), normalizedScore]) }) return results } diff --git a/packages/components/nodes/vectorstores/MongoDBAtlas/MongoDBAtlas.ts b/packages/components/nodes/vectorstores/MongoDBAtlas/MongoDBAtlas.ts index 785c64484..e33ad9753 100644 --- a/packages/components/nodes/vectorstores/MongoDBAtlas/MongoDBAtlas.ts +++ b/packages/components/nodes/vectorstores/MongoDBAtlas/MongoDBAtlas.ts @@ -2,7 +2,7 @@ import { flatten } from 'lodash' import { Embeddings } from '@langchain/core/embeddings' import { Document } from '@langchain/core/documents' import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams, IndexingResult } from '../../../src/Interface' -import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils' +import { getBaseClasses, getCredentialData, getCredentialParam, parseJsonBody } from '../../../src/utils' import { addMMRInputParams, resolveVectorStoreOrRetriever } from '../VectorStoreUtils' import { MongoDBAtlasVectorSearch } from './core' @@ -90,7 +90,8 @@ class MongoDBAtlas_VectorStores implements INode { name: 'mongoMetadataFilter', type: 'json', optional: true, - additionalParams: true + additionalParams: true, + acceptVariable: true }, { label: 'Top K', @@ -186,7 +187,7 @@ class MongoDBAtlas_VectorStores implements INode { }) if (mongoMetadataFilter) { - const metadataFilter = typeof mongoMetadataFilter === 
'object' ? mongoMetadataFilter : JSON.parse(mongoMetadataFilter) + const metadataFilter = typeof mongoMetadataFilter === 'object' ? mongoMetadataFilter : parseJsonBody(mongoMetadataFilter) for (const key in metadataFilter) { mongoDbFilter.preFilter = { diff --git a/packages/components/nodes/vectorstores/Pinecone/Pinecone.ts b/packages/components/nodes/vectorstores/Pinecone/Pinecone.ts index c9ff257fb..0e2fb8247 100644 --- a/packages/components/nodes/vectorstores/Pinecone/Pinecone.ts +++ b/packages/components/nodes/vectorstores/Pinecone/Pinecone.ts @@ -5,7 +5,7 @@ import { Embeddings } from '@langchain/core/embeddings' import { Document } from '@langchain/core/documents' import { VectorStore } from '@langchain/core/vectorstores' import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams, IndexingResult } from '../../../src/Interface' -import { FLOWISE_CHATID, getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils' +import { FLOWISE_CHATID, getBaseClasses, getCredentialData, getCredentialParam, parseJsonBody } from '../../../src/utils' import { addMMRInputParams, howToUseFileUpload, resolveVectorStoreOrRetriever } from '../VectorStoreUtils' import { index } from '../../../src/indexing' @@ -97,7 +97,8 @@ class Pinecone_VectorStores implements INode { name: 'pineconeMetadataFilter', type: 'json', optional: true, - additionalParams: true + additionalParams: true, + acceptVariable: true }, { label: 'Top K', @@ -247,7 +248,8 @@ class Pinecone_VectorStores implements INode { if (pineconeNamespace) obj.namespace = pineconeNamespace if (pineconeMetadataFilter) { - const metadatafilter = typeof pineconeMetadataFilter === 'object' ? pineconeMetadataFilter : JSON.parse(pineconeMetadataFilter) + const metadatafilter = + typeof pineconeMetadataFilter === 'object' ? 
pineconeMetadataFilter : parseJsonBody(pineconeMetadataFilter) obj.filter = metadatafilter } if (isFileUploadEnabled && options.chatId) { diff --git a/packages/components/nodes/vectorstores/Pinecone/Pinecone_LlamaIndex.ts b/packages/components/nodes/vectorstores/Pinecone/Pinecone_LlamaIndex.ts index ad0371ef5..61aa173c2 100644 --- a/packages/components/nodes/vectorstores/Pinecone/Pinecone_LlamaIndex.ts +++ b/packages/components/nodes/vectorstores/Pinecone/Pinecone_LlamaIndex.ts @@ -16,7 +16,7 @@ import { FetchResponse, Index, Pinecone, ScoredPineconeRecord } from '@pinecone- import { flatten } from 'lodash' import { Document as LCDocument } from 'langchain/document' import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams, IndexingResult } from '../../../src/Interface' -import { flattenObject, getCredentialData, getCredentialParam } from '../../../src/utils' +import { flattenObject, getCredentialData, getCredentialParam, parseJsonBody } from '../../../src/utils' class PineconeLlamaIndex_VectorStores implements INode { label: string @@ -176,7 +176,7 @@ class PineconeLlamaIndex_VectorStores implements INode { let metadatafilter = {} if (pineconeMetadataFilter) { - metadatafilter = typeof pineconeMetadataFilter === 'object' ? pineconeMetadataFilter : JSON.parse(pineconeMetadataFilter) + metadatafilter = typeof pineconeMetadataFilter === 'object' ? 
pineconeMetadataFilter : parseJsonBody(pineconeMetadataFilter) obj.queryFilter = metadatafilter } diff --git a/packages/components/nodes/vectorstores/Postgres/Postgres.ts b/packages/components/nodes/vectorstores/Postgres/Postgres.ts index ad0f82bb0..98f8b07f6 100644 --- a/packages/components/nodes/vectorstores/Postgres/Postgres.ts +++ b/packages/components/nodes/vectorstores/Postgres/Postgres.ts @@ -1,7 +1,7 @@ import { flatten } from 'lodash' import { Document } from '@langchain/core/documents' import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams, IndexingResult } from '../../../src/Interface' -import { FLOWISE_CHATID, getBaseClasses } from '../../../src/utils' +import { FLOWISE_CHATID, getBaseClasses, parseJsonBody } from '../../../src/utils' import { index } from '../../../src/indexing' import { howToUseFileUpload } from '../VectorStoreUtils' import { VectorStore } from '@langchain/core/vectorstores' @@ -194,7 +194,8 @@ class Postgres_VectorStores implements INode { name: 'pgMetadataFilter', type: 'json', additionalParams: true, - optional: true + optional: true, + acceptVariable: true }, { label: 'Content Column Name', @@ -307,7 +308,7 @@ class Postgres_VectorStores implements INode { let pgMetadataFilter: any if (_pgMetadataFilter) { - pgMetadataFilter = typeof _pgMetadataFilter === 'object' ? _pgMetadataFilter : JSON.parse(_pgMetadataFilter) + pgMetadataFilter = typeof _pgMetadataFilter === 'object' ? 
_pgMetadataFilter : parseJsonBody(_pgMetadataFilter) } if (isFileUploadEnabled && options.chatId) { pgMetadataFilter = { diff --git a/packages/components/nodes/vectorstores/Qdrant/Qdrant.ts b/packages/components/nodes/vectorstores/Qdrant/Qdrant.ts index 2bdbcf353..6d292119e 100644 --- a/packages/components/nodes/vectorstores/Qdrant/Qdrant.ts +++ b/packages/components/nodes/vectorstores/Qdrant/Qdrant.ts @@ -6,7 +6,7 @@ import { Document } from '@langchain/core/documents' import { QdrantVectorStore, QdrantLibArgs } from '@langchain/qdrant' import { Embeddings } from '@langchain/core/embeddings' import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams, IndexingResult } from '../../../src/Interface' -import { FLOWISE_CHATID, getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils' +import { FLOWISE_CHATID, getBaseClasses, getCredentialData, getCredentialParam, parseJsonBody } from '../../../src/utils' import { index } from '../../../src/indexing' import { howToUseFileUpload } from '../VectorStoreUtils' @@ -77,7 +77,8 @@ class Qdrant_VectorStores implements INode { { label: 'Qdrant Collection Name', name: 'qdrantCollection', - type: 'string' + type: 'string', + acceptVariable: true }, { label: 'File Upload', @@ -171,7 +172,8 @@ class Qdrant_VectorStores implements INode { description: 'Only return points which satisfy the conditions', type: 'json', additionalParams: true, - optional: true + optional: true, + acceptVariable: true } ] this.outputs = [ @@ -438,7 +440,7 @@ class Qdrant_VectorStores implements INode { qdrantCollectionConfiguration = typeof qdrantCollectionConfiguration === 'object' ? 
qdrantCollectionConfiguration - : JSON.parse(qdrantCollectionConfiguration) + : parseJsonBody(qdrantCollectionConfiguration) dbConfig.collectionConfig = { ...qdrantCollectionConfiguration, vectors: { @@ -450,7 +452,7 @@ class Qdrant_VectorStores implements INode { } if (queryFilter) { - retrieverConfig.filter = typeof queryFilter === 'object' ? queryFilter : JSON.parse(queryFilter) + retrieverConfig.filter = typeof queryFilter === 'object' ? queryFilter : parseJsonBody(queryFilter) } if (isFileUploadEnabled && options.chatId) { retrieverConfig.filter = retrieverConfig.filter || {} diff --git a/packages/components/nodes/vectorstores/Redis/Redis.ts b/packages/components/nodes/vectorstores/Redis/Redis.ts index d4fbcf49e..23f124139 100644 --- a/packages/components/nodes/vectorstores/Redis/Redis.ts +++ b/packages/components/nodes/vectorstores/Redis/Redis.ts @@ -153,8 +153,12 @@ class Redis_VectorStores implements INode { keepAlive: process.env.REDIS_KEEP_ALIVE && !isNaN(parseInt(process.env.REDIS_KEEP_ALIVE, 10)) ? parseInt(process.env.REDIS_KEEP_ALIVE, 10) - : undefined // milliseconds - } + : undefined + }, + pingInterval: + process.env.REDIS_KEEP_ALIVE && !isNaN(parseInt(process.env.REDIS_KEEP_ALIVE, 10)) + ? parseInt(process.env.REDIS_KEEP_ALIVE, 10) + : undefined // Add Redis protocol-level pings }) await redisClient.connect() @@ -226,8 +230,12 @@ class Redis_VectorStores implements INode { keepAlive: process.env.REDIS_KEEP_ALIVE && !isNaN(parseInt(process.env.REDIS_KEEP_ALIVE, 10)) ? parseInt(process.env.REDIS_KEEP_ALIVE, 10) - : undefined // milliseconds - } + : undefined + }, + pingInterval: + process.env.REDIS_KEEP_ALIVE && !isNaN(parseInt(process.env.REDIS_KEEP_ALIVE, 10)) + ? 
parseInt(process.env.REDIS_KEEP_ALIVE, 10) + : undefined // Add Redis protocol-level pings }) const storeConfig: RedisVectorStoreConfig = { diff --git a/packages/components/nodes/vectorstores/Supabase/Supabase.ts b/packages/components/nodes/vectorstores/Supabase/Supabase.ts index 25f379246..abe023309 100644 --- a/packages/components/nodes/vectorstores/Supabase/Supabase.ts +++ b/packages/components/nodes/vectorstores/Supabase/Supabase.ts @@ -3,11 +3,12 @@ import { v4 as uuidv4 } from 'uuid' import { createClient } from '@supabase/supabase-js' import { Document } from '@langchain/core/documents' import { Embeddings } from '@langchain/core/embeddings' -import { SupabaseVectorStore, SupabaseLibArgs, SupabaseFilterRPCCall } from '@langchain/community/vectorstores/supabase' +import { SupabaseVectorStore, SupabaseLibArgs } from '@langchain/community/vectorstores/supabase' import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams, IndexingResult } from '../../../src/Interface' -import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils' +import { getBaseClasses, getCredentialData, getCredentialParam, parseJsonBody } from '../../../src/utils' import { addMMRInputParams, resolveVectorStoreOrRetriever } from '../VectorStoreUtils' import { index } from '../../../src/indexing' +import { FilterParser } from './filterParser' class Supabase_VectorStores implements INode { label: string @@ -78,7 +79,8 @@ class Supabase_VectorStores implements INode { name: 'supabaseMetadataFilter', type: 'json', optional: true, - additionalParams: true + additionalParams: true, + acceptVariable: true }, { label: 'Supabase RPC Filter', @@ -228,16 +230,13 @@ class Supabase_VectorStores implements INode { } if (supabaseMetadataFilter) { - const metadatafilter = typeof supabaseMetadataFilter === 'object' ? supabaseMetadataFilter : JSON.parse(supabaseMetadataFilter) + const metadatafilter = + typeof supabaseMetadataFilter === 'object' ? 
supabaseMetadataFilter : parseJsonBody(supabaseMetadataFilter) obj.filter = metadatafilter } if (supabaseRPCFilter) { - const funcString = `return rpc.${supabaseRPCFilter};` - const funcFilter = new Function('rpc', funcString) - obj.filter = (rpc: SupabaseFilterRPCCall) => { - return funcFilter(rpc) - } + obj.filter = FilterParser.parseFilterString(supabaseRPCFilter) } const vectorStore = await SupabaseVectorStore.fromExistingIndex(embeddings, obj) diff --git a/packages/components/nodes/vectorstores/Supabase/filterParser.ts b/packages/components/nodes/vectorstores/Supabase/filterParser.ts new file mode 100644 index 000000000..c1b16b4a9 --- /dev/null +++ b/packages/components/nodes/vectorstores/Supabase/filterParser.ts @@ -0,0 +1,203 @@ +/** + * This parser safely handles Supabase filter strings without allowing arbitrary code execution + */ +export class FilterParser { + private static readonly ALLOWED_METHODS = ['filter', 'order', 'limit', 'range', 'single', 'maybeSingle'] + private static readonly ALLOWED_OPERATORS = [ + 'eq', + 'neq', + 'gt', + 'gte', + 'lt', + 'lte', + 'like', + 'ilike', + 'is', + 'in', + 'cs', + 'cd', + 'sl', + 'sr', + 'nxl', + 'nxr', + 'adj', + 'ov', + 'fts', + 'plfts', + 'phfts', + 'wfts' + ] + + /** + * Safely parse a Supabase RPC filter string into a function + * @param filterString The filter string (e.g., 'filter("metadata->a::int", "gt", 5).filter("metadata->c::int", "gt", 7)') + * @returns A function that can be applied to an RPC object + * @throws Error if the filter string contains unsafe patterns + */ + static parseFilterString(filterString: string): (rpc: any) => any { + try { + // Clean and validate the filter string + const cleanedFilter = this.cleanFilterString(filterString) + + // Parse the filter chain + const filterChain = this.parseFilterChain(cleanedFilter) + + // Build the safe filter function + return this.buildFilterFunction(filterChain) + } catch (error) { + throw new Error(`Failed to parse Supabase filter: 
${error.message}`) + } + } + + private static cleanFilterString(filter: string): string { + // Remove comments and normalize whitespace + filter = filter.replace(/\/\/.*$/gm, '').replace(/\/\*[\s\S]*?\*\//g, '') + filter = filter.replace(/\s+/g, ' ').trim() + + // Remove trailing semicolon if present + if (filter.endsWith(';')) { + filter = filter.slice(0, -1).trim() + } + + return filter + } + + private static parseFilterChain(filter: string): Array<{ method: string; args: any[] }> { + const chain: Array<{ method: string; args: any[] }> = [] + + // Split on method calls (e.g., .filter, .order, etc.) + const methodPattern = /\.?(\w+)\s*\((.*?)\)(?=\s*(?:\.|$))/g + let match + + while ((match = methodPattern.exec(filter)) !== null) { + const method = match[1] + const argsString = match[2] + + // Validate method name + if (!this.ALLOWED_METHODS.includes(method)) { + throw new Error(`Disallowed method: ${method}`) + } + + // Parse arguments safely + const args = this.parseArguments(argsString) + + // Additional validation for filter method + if (method === 'filter' && args.length >= 2) { + const operator = args[1] + if (typeof operator === 'string' && !this.ALLOWED_OPERATORS.includes(operator)) { + throw new Error(`Disallowed filter operator: ${operator}`) + } + } + + chain.push({ method, args }) + } + + if (chain.length === 0) { + throw new Error('No valid filter methods found') + } + + return chain + } + + private static parseArguments(argsString: string): any[] { + if (!argsString.trim()) { + return [] + } + + const args: any[] = [] + let current = '' + let inString = false + let stringChar = '' + let depth = 0 + + for (let i = 0; i < argsString.length; i++) { + const char = argsString[i] + + if (!inString && (char === '"' || char === "'")) { + inString = true + stringChar = char + current += char + } else if (inString && char === stringChar && argsString[i - 1] !== '\\') { + inString = false + current += char + } else if (!inString) { + if (char === '(' || char 
=== '[' || char === '{') { + depth++ + current += char + } else if (char === ')' || char === ']' || char === '}') { + depth-- + current += char + } else if (char === ',' && depth === 0) { + args.push(this.parseArgument(current.trim())) + current = '' + continue + } else { + current += char + } + } else { + current += char + } + } + + if (current.trim()) { + args.push(this.parseArgument(current.trim())) + } + + return args + } + + private static parseArgument(arg: string): any { + arg = arg.trim() + + // Handle strings + if ((arg.startsWith('"') && arg.endsWith('"')) || (arg.startsWith("'") && arg.endsWith("'"))) { + return arg.slice(1, -1) + } + + // Handle numbers + if (arg.match(/^-?\d+(\.\d+)?$/)) { + return parseFloat(arg) + } + + // Handle booleans + if (arg === 'true') return true + if (arg === 'false') return false + if (arg === 'null') return null + + // Handle arrays (basic support) + if (arg.startsWith('[') && arg.endsWith(']')) { + const arrayContent = arg.slice(1, -1).trim() + if (!arrayContent) return [] + + // Simple array parsing - just split by comma and parse each element + return arrayContent.split(',').map((item) => this.parseArgument(item.trim())) + } + + // For everything else, treat as string (but validate it doesn't contain dangerous characters) + if (arg.includes('require') || arg.includes('process') || arg.includes('eval') || arg.includes('Function')) { + throw new Error(`Potentially dangerous argument: ${arg}`) + } + + return arg + } + + private static buildFilterFunction(chain: Array<{ method: string; args: any[] }>): (rpc: any) => any { + return (rpc: any) => { + let result = rpc + + for (const { method, args } of chain) { + if (typeof result[method] !== 'function') { + throw new Error(`Method ${method} is not available on the RPC object`) + } + + try { + result = result[method](...args) + } catch (error) { + throw new Error(`Failed to call ${method}: ${error.message}`) + } + } + + return result + } + } +} diff --git 
a/packages/components/nodes/vectorstores/Upstash/Upstash.ts b/packages/components/nodes/vectorstores/Upstash/Upstash.ts index d126daa86..2886afcfa 100644 --- a/packages/components/nodes/vectorstores/Upstash/Upstash.ts +++ b/packages/components/nodes/vectorstores/Upstash/Upstash.ts @@ -80,7 +80,8 @@ class Upstash_VectorStores implements INode { name: 'upstashMetadataFilter', type: 'string', optional: true, - additionalParams: true + additionalParams: true, + acceptVariable: true }, { label: 'Top K', diff --git a/packages/components/nodes/vectorstores/Vectara/Vectara.ts b/packages/components/nodes/vectorstores/Vectara/Vectara.ts index d7260d109..ae752602f 100644 --- a/packages/components/nodes/vectorstores/Vectara/Vectara.ts +++ b/packages/components/nodes/vectorstores/Vectara/Vectara.ts @@ -65,7 +65,8 @@ class Vectara_VectorStores implements INode { 'Filter to apply to Vectara metadata. Refer to the documentation on how to use Vectara filters with Flowise.', type: 'string', additionalParams: true, - optional: true + optional: true, + acceptVariable: true }, { label: 'Sentences Before', @@ -191,11 +192,12 @@ class Vectara_VectorStores implements INode { } else { files = [fileName] } + const orgId = options.orgId const chatflowid = options.chatflowid for (const file of files) { if (!file) continue - const fileData = await getFileFromStorage(file, chatflowid) + const fileData = await getFileFromStorage(file, orgId, chatflowid) const blob = new Blob([fileData]) vectaraFiles.push({ blob: blob, fileName: getFileName(file) }) } diff --git a/packages/components/nodes/vectorstores/Vectara/Vectara_Upload.ts b/packages/components/nodes/vectorstores/Vectara/Vectara_Upload.ts new file mode 100644 index 000000000..e205c5e6d --- /dev/null +++ b/packages/components/nodes/vectorstores/Vectara/Vectara_Upload.ts @@ -0,0 +1,197 @@ +import { VectaraStore, VectaraLibArgs, VectaraFilter, VectaraContextConfig, VectaraFile } from '@langchain/community/vectorstores/vectara' +import { 
ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface' +import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils' +import { getFileFromStorage } from '../../../src' + +class VectaraUpload_VectorStores implements INode { + label: string + name: string + version: number + description: string + type: string + icon: string + category: string + badge: string + baseClasses: string[] + inputs: INodeParams[] + credential: INodeParams + outputs: INodeOutputsValue[] + + constructor() { + this.label = 'Vectara Upload File' + this.name = 'vectaraUpload' + this.version = 1.0 + this.type = 'Vectara' + this.icon = 'vectara.png' + this.category = 'Vector Stores' + this.description = 'Upload files to Vectara' + this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] + this.badge = 'DEPRECATING' + this.credential = { + label: 'Connect Credential', + name: 'credential', + type: 'credential', + credentialNames: ['vectaraApi'] + } + this.inputs = [ + { + label: 'File', + name: 'file', + description: + 'File to upload to Vectara. Supported file types: https://docs.vectara.com/docs/api-reference/indexing-apis/file-upload/file-upload-filetypes', + type: 'file' + }, + { + label: 'Metadata Filter', + name: 'filter', + description: + 'Filter to apply to Vectara metadata. Refer to the documentation on how to use Vectara filters with Flowise.', + type: 'string', + additionalParams: true, + optional: true + }, + { + label: 'Sentences Before', + name: 'sentencesBefore', + description: 'Number of sentences to fetch before the matched sentence. Defaults to 2.', + type: 'number', + additionalParams: true, + optional: true + }, + { + label: 'Sentences After', + name: 'sentencesAfter', + description: 'Number of sentences to fetch after the matched sentence. 
Defaults to 2.', + type: 'number', + additionalParams: true, + optional: true + }, + { + label: 'Lambda', + name: 'lambda', + description: + 'Improves retrieval accuracy by adjusting the balance (from 0 to 1) between neural search and keyword-based search factors.', + type: 'number', + additionalParams: true, + optional: true + }, + { + label: 'Top K', + name: 'topK', + description: 'Number of top results to fetch. Defaults to 4', + placeholder: '4', + type: 'number', + additionalParams: true, + optional: true + } + ] + this.outputs = [ + { + label: 'Vectara Retriever', + name: 'retriever', + baseClasses: this.baseClasses + }, + { + label: 'Vectara Vector Store', + name: 'vectorStore', + baseClasses: [this.type, ...getBaseClasses(VectaraStore)] + } + ] + } + async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { + const credentialData = await getCredentialData(nodeData.credential ?? '', options) + const apiKey = getCredentialParam('apiKey', credentialData, nodeData) + const customerId = getCredentialParam('customerID', credentialData, nodeData) + const corpusId = getCredentialParam('corpusID', credentialData, nodeData).split(',') + + const fileBase64 = nodeData.inputs?.file + const vectaraMetadataFilter = nodeData.inputs?.filter as string + const sentencesBefore = nodeData.inputs?.sentencesBefore as number + const sentencesAfter = nodeData.inputs?.sentencesAfter as number + const lambda = nodeData.inputs?.lambda as number + const output = nodeData.outputs?.output as string + const topK = nodeData.inputs?.topK as string + const k = topK ? 
parseInt(topK, 10) : 4 + + const vectaraArgs: VectaraLibArgs = { + apiKey: apiKey, + customerId: customerId, + corpusId: corpusId, + source: 'flowise' + } + + const vectaraFilter: VectaraFilter = {} + if (vectaraMetadataFilter) vectaraFilter.filter = vectaraMetadataFilter + if (lambda) vectaraFilter.lambda = lambda + + const vectaraContextConfig: VectaraContextConfig = {} + if (sentencesBefore) vectaraContextConfig.sentencesBefore = sentencesBefore + if (sentencesAfter) vectaraContextConfig.sentencesAfter = sentencesAfter + vectaraFilter.contextConfig = vectaraContextConfig + + let files: string[] = [] + const vectaraFiles: VectaraFile[] = [] + + if (fileBase64.startsWith('FILE-STORAGE::')) { + const fileName = fileBase64.replace('FILE-STORAGE::', '') + if (fileName.startsWith('[') && fileName.endsWith(']')) { + files = JSON.parse(fileName) + } else { + files = [fileName] + } + const orgId = options.orgId + const chatflowid = options.chatflowid + + for (const file of files) { + const fileData = await getFileFromStorage(file, orgId, chatflowid) + const blob = new Blob([fileData]) + vectaraFiles.push({ blob: blob, fileName: getFileName(file) }) + } + } else { + if (fileBase64.startsWith('[') && fileBase64.endsWith(']')) { + files = JSON.parse(fileBase64) + } else { + files = [fileBase64] + } + + for (const file of files) { + const splitDataURI = file.split(',') + splitDataURI.pop() + const bf = Buffer.from(splitDataURI.pop() || '', 'base64') + const blob = new Blob([bf]) + vectaraFiles.push({ blob: blob, fileName: getFileName(file) }) + } + } + + const vectorStore = new VectaraStore(vectaraArgs) + await vectorStore.addFiles(vectaraFiles) + + if (output === 'retriever') { + const retriever = vectorStore.asRetriever(k, vectaraFilter) + return retriever + } else if (output === 'vectorStore') { + ;(vectorStore as any).k = k + return vectorStore + } + return vectorStore + } +} + +const getFileName = (fileBase64: string) => { + let fileNames = [] + if 
(fileBase64.startsWith('[') && fileBase64.endsWith(']')) { + const files = JSON.parse(fileBase64) + for (const file of files) { + const splitDataURI = file.split(',') + const filename = splitDataURI[splitDataURI.length - 1].split(':')[1] + fileNames.push(filename) + } + return fileNames.join(', ') + } else { + const splitDataURI = fileBase64.split(',') + const filename = splitDataURI[splitDataURI.length - 1].split(':')[1] + return filename + } +} + +module.exports = { nodeClass: VectaraUpload_VectorStores } diff --git a/packages/components/nodes/vectorstores/Weaviate/Weaviate.ts b/packages/components/nodes/vectorstores/Weaviate/Weaviate.ts index ae2c0164f..eb61f0205 100644 --- a/packages/components/nodes/vectorstores/Weaviate/Weaviate.ts +++ b/packages/components/nodes/vectorstores/Weaviate/Weaviate.ts @@ -4,7 +4,7 @@ import { WeaviateLibArgs, WeaviateStore } from '@langchain/weaviate' import { Document } from '@langchain/core/documents' import { Embeddings } from '@langchain/core/embeddings' import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams, IndexingResult } from '../../../src/Interface' -import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils' +import { getBaseClasses, getCredentialData, getCredentialParam, normalizeKeysRecursively, parseJsonBody } from '../../../src/utils' import { addMMRInputParams, resolveVectorStoreOrRetriever } from '../VectorStoreUtils' import { index } from '../../../src/indexing' import { VectorStore } from '@langchain/core/vectorstores' @@ -120,7 +120,8 @@ class Weaviate_VectorStores implements INode { name: 'weaviateFilter', type: 'json', additionalParams: true, - optional: true + optional: true, + acceptVariable: true } ] addMMRInputParams(this.inputs) @@ -175,7 +176,11 @@ class Weaviate_VectorStores implements INode { const finalDocs = [] for (let i = 0; i < flattenDocs.length; i += 1) { if (flattenDocs[i] && flattenDocs[i].pageContent) { - finalDocs.push(new 
Document(flattenDocs[i])) + const doc = { ...flattenDocs[i] } + if (doc.metadata) { + doc.metadata = normalizeKeysRecursively(doc.metadata) + } + finalDocs.push(new Document(doc)) } } @@ -289,7 +294,7 @@ class Weaviate_VectorStores implements INode { if (weaviateTextKey) obj.textKey = weaviateTextKey if (weaviateMetadataKeys) obj.metadataKeys = JSON.parse(weaviateMetadataKeys.replace(/\s/g, '')) if (weaviateFilter) { - weaviateFilter = typeof weaviateFilter === 'object' ? weaviateFilter : JSON.parse(weaviateFilter) + weaviateFilter = typeof weaviateFilter === 'object' ? weaviateFilter : parseJsonBody(weaviateFilter) } const vectorStore = (await WeaviateStore.fromExistingIndex(embeddings, obj)) as unknown as VectorStore diff --git a/packages/components/nodes/vectorstores/Zep/Zep.ts b/packages/components/nodes/vectorstores/Zep/Zep.ts index 5caa8e515..099dcaa11 100644 --- a/packages/components/nodes/vectorstores/Zep/Zep.ts +++ b/packages/components/nodes/vectorstores/Zep/Zep.ts @@ -4,7 +4,7 @@ import { ZepVectorStore, IZepConfig } from '@langchain/community/vectorstores/ze import { Embeddings } from '@langchain/core/embeddings' import { Document } from '@langchain/core/documents' import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams, IndexingResult } from '../../../src/Interface' -import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils' +import { getBaseClasses, getCredentialData, getCredentialParam, parseJsonBody } from '../../../src/utils' import { addMMRInputParams, resolveVectorStoreOrRetriever } from '../VectorStoreUtils' class Zep_VectorStores implements INode { @@ -69,7 +69,8 @@ class Zep_VectorStores implements INode { name: 'zepMetadataFilter', type: 'json', optional: true, - additionalParams: true + additionalParams: true, + acceptVariable: true }, { label: 'Embedding Dimension', @@ -158,7 +159,7 @@ class Zep_VectorStores implements INode { } if (apiKey) zepConfig.apiKey = apiKey if (zepMetadataFilter) { - 
const metadatafilter = typeof zepMetadataFilter === 'object' ? zepMetadataFilter : JSON.parse(zepMetadataFilter) + const metadatafilter = typeof zepMetadataFilter === 'object' ? zepMetadataFilter : parseJsonBody(zepMetadataFilter) zepConfig.filter = metadatafilter } diff --git a/packages/components/nodes/vectorstores/ZepCloud/ZepCloud.ts b/packages/components/nodes/vectorstores/ZepCloud/ZepCloud.ts index c7e68f05f..6adbe86da 100644 --- a/packages/components/nodes/vectorstores/ZepCloud/ZepCloud.ts +++ b/packages/components/nodes/vectorstores/ZepCloud/ZepCloud.ts @@ -3,7 +3,7 @@ import { ZepClient } from '@getzep/zep-cloud' import { IZepConfig, ZepVectorStore } from '@getzep/zep-cloud/langchain' import { Document } from 'langchain/document' import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams, IndexingResult } from '../../../src/Interface' -import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils' +import { getBaseClasses, getCredentialData, getCredentialParam, parseJsonBody } from '../../../src/utils' import { addMMRInputParams, resolveVectorStoreOrRetriever } from '../VectorStoreUtils' import { FakeEmbeddings } from 'langchain/embeddings/fake' import { Embeddings } from '@langchain/core/embeddings' @@ -59,7 +59,8 @@ class Zep_CloudVectorStores implements INode { name: 'zepMetadataFilter', type: 'json', optional: true, - additionalParams: true + additionalParams: true, + acceptVariable: true }, { label: 'Top K', @@ -128,7 +129,7 @@ class Zep_CloudVectorStores implements INode { collectionName: zepCollection } if (zepMetadataFilter) { - zepConfig.filter = typeof zepMetadataFilter === 'object' ? zepMetadataFilter : JSON.parse(zepMetadataFilter) + zepConfig.filter = typeof zepMetadataFilter === 'object' ? 
zepMetadataFilter : parseJsonBody(zepMetadataFilter) } zepConfig.client = new ZepClient({ apiKey: apiKey diff --git a/packages/components/package.json b/packages/components/package.json index 44246c650..e379b6e7a 100644 --- a/packages/components/package.json +++ b/packages/components/package.json @@ -1,6 +1,6 @@ { "name": "flowise-components", - "version": "3.0.0", + "version": "3.0.11", "description": "Flowiseai Components", "main": "dist/src/index", "types": "dist/src/index.d.ts", @@ -8,7 +8,10 @@ "build": "tsc && gulp", "lint": "eslint . --ext ts,tsx --report-unused-disable-directives --max-warnings 0", "clean": "rimraf dist", - "nuke": "rimraf dist node_modules .turbo" + "nuke": "rimraf dist node_modules .turbo", + "test": "jest", + "test:watch": "jest --watch", + "test:coverage": "jest --coverage" }, "keywords": [], "homepage": "https://flowiseai.com", @@ -22,12 +25,15 @@ "@arizeai/openinference-instrumentation-langchain": "^2.0.0", "@aws-sdk/client-bedrock-runtime": "3.422.0", "@aws-sdk/client-dynamodb": "^3.360.0", - "@aws-sdk/client-s3": "^3.427.0", + "@aws-sdk/client-kendra": "^3.750.0", + "@aws-sdk/client-s3": "^3.844.0", "@aws-sdk/client-secrets-manager": "^3.699.0", + "@aws-sdk/client-sns": "^3.699.0", "@datastax/astra-db-ts": "1.5.0", "@dqbd/tiktoken": "^1.0.21", - "@e2b/code-interpreter": "^0.0.5", + "@e2b/code-interpreter": "^1.5.1", "@elastic/elasticsearch": "^8.9.0", + "@elevenlabs/elevenlabs-js": "^2.8.0", "@flowiseai/nodevm": "^3.9.25", "@getzep/zep-cloud": "~1.0.7", "@getzep/zep-js": "^0.9.0", @@ -36,28 +42,29 @@ "@google-ai/generativelanguage": "^2.5.0", "@google-cloud/storage": "^7.15.2", "@google/generative-ai": "^0.24.0", - "@huggingface/inference": "^2.6.1", - "@langchain/anthropic": "0.3.14", - "@langchain/aws": "0.1.4", + "@grpc/grpc-js": "^1.10.10", + "@huggingface/inference": "^4.13.2", + "@langchain/anthropic": "0.3.33", + "@langchain/aws": "^0.1.11", "@langchain/baidu-qianfan": "^0.1.0", "@langchain/cohere": "^0.0.7", - 
"@langchain/community": "^0.3.29", - "@langchain/core": "0.3.37", + "@langchain/community": "^0.3.47", + "@langchain/core": "0.3.61", "@langchain/exa": "^0.0.5", "@langchain/google-genai": "0.2.3", - "@langchain/google-vertexai": "^0.2.0", + "@langchain/google-vertexai": "^0.2.10", "@langchain/groq": "0.1.2", "@langchain/langgraph": "^0.0.22", "@langchain/mistralai": "^0.2.0", "@langchain/mongodb": "^0.0.1", "@langchain/ollama": "0.2.0", - "@langchain/openai": "0.5.6", + "@langchain/openai": "0.6.3", "@langchain/pinecone": "^0.1.3", "@langchain/qdrant": "^0.0.5", "@langchain/weaviate": "^0.0.1", "@langchain/xai": "^0.0.1", "@mem0/community": "^0.0.1", - "@mendable/firecrawl-js": "^0.0.28", + "@mendable/firecrawl-js": "^1.18.2", "@mistralai/mistralai": "0.1.3", "@modelcontextprotocol/sdk": "^1.10.1", "@modelcontextprotocol/server-brave-search": "^0.6.2", @@ -67,6 +74,20 @@ "@modelcontextprotocol/server-slack": "^2025.1.17", "@notionhq/client": "^2.2.8", "@opensearch-project/opensearch": "^1.2.0", + "@opentelemetry/api": "1.9.0", + "@opentelemetry/auto-instrumentations-node": "^0.52.0", + "@opentelemetry/core": "1.27.0", + "@opentelemetry/exporter-metrics-otlp-grpc": "0.54.0", + "@opentelemetry/exporter-metrics-otlp-http": "0.54.0", + "@opentelemetry/exporter-metrics-otlp-proto": "0.54.0", + "@opentelemetry/exporter-trace-otlp-grpc": "0.54.0", + "@opentelemetry/exporter-trace-otlp-http": "0.54.0", + "@opentelemetry/exporter-trace-otlp-proto": "0.54.0", + "@opentelemetry/resources": "1.27.0", + "@opentelemetry/sdk-metrics": "1.27.0", + "@opentelemetry/sdk-node": "^0.54.0", + "@opentelemetry/sdk-trace-base": "1.27.0", + "@opentelemetry/semantic-conventions": "1.27.0", "@pinecone-database/pinecone": "4.0.0", "@qdrant/js-client-rest": "^1.9.0", "@stripe/agent-toolkit": "^0.1.20", @@ -78,7 +99,7 @@ "@zilliz/milvus2-sdk-node": "^2.2.24", "apify-client": "^2.7.1", "assemblyai": "^4.2.2", - "axios": "1.7.9", + "axios": "1.12.0", "cheerio": "^1.0.0-rc.12", "chromadb": 
"^1.10.0", "cohere-ai": "^7.7.5", @@ -93,12 +114,14 @@ "express": "^4.17.3", "faiss-node": "^0.5.1", "fast-json-patch": "^3.1.1", - "form-data": "^4.0.0", + "form-data": "^4.0.4", "google-auth-library": "^9.4.0", "graphql": "^16.6.0", "html-to-text": "^9.0.5", "ioredis": "^5.3.2", + "ipaddr.js": "^2.2.0", "jsdom": "^22.1.0", + "json5": "2.2.3", "jsonpointer": "^5.0.1", "jsonrepair": "^3.11.1", "langchain": "^0.3.5", @@ -106,7 +129,7 @@ "langfuse-langchain": "^3.3.4", "langsmith": "0.1.6", "langwatch": "^0.1.1", - "linkifyjs": "^4.1.1", + "linkifyjs": "^4.3.2", "llamaindex": "^0.3.13", "lodash": "^4.17.21", "lunary": "^0.7.12", @@ -120,23 +143,27 @@ "node-html-markdown": "^1.3.0", "notion-to-md": "^3.1.1", "object-hash": "^3.0.0", + "officeparser": "5.1.1", "ollama": "^0.5.11", "openai": "^4.96.0", "papaparse": "^5.4.1", "pdf-parse": "^1.1.1", - "pdfjs-dist": "^3.7.107", + "pdfjs-dist": "^5.3.93", "pg": "^8.11.2", "playwright": "^1.35.0", "puppeteer": "^20.7.1", "pyodide": ">=0.21.0-alpha.2", "redis": "^4.6.7", + "remove-markdown": "^0.6.2", "replicate": "^0.31.1", "sanitize-filename": "^1.6.3", "srt-parser-2": "^1.2.3", + "supergateway": "3.0.1", "typeorm": "^0.3.6", "weaviate-ts-client": "^1.1.0", "winston": "^3.9.0", "ws": "^8.18.0", + "xlsx": "0.18.5", "zod": "3.22.4", "zod-to-json-schema": "^3.21.4" }, @@ -144,15 +171,17 @@ "@swc/core": "^1.3.99", "@types/crypto-js": "^4.1.1", "@types/gulp": "4.0.9", - "@types/lodash": "^4.14.202", + "@types/jest": "^29.5.14", + "@types/lodash": "^4.17.20", "@types/node-fetch": "2.6.2", "@types/object-hash": "^3.0.2", "@types/papaparse": "^5.3.15", "@types/pg": "^8.10.2", "@types/ws": "^8.5.3", - "babel-register": "^6.26.0", "gulp": "^4.0.2", + "jest": "^29.7.0", "rimraf": "^5.0.5", + "ts-jest": "^29.3.2", "tsc-watch": "^6.0.4", "tslib": "^2.6.2", "typescript": "^5.4.5" diff --git a/packages/components/src/Interface.Evaluation.ts b/packages/components/src/Interface.Evaluation.ts new file mode 100644 index 000000000..1199833d2 
--- /dev/null +++ b/packages/components/src/Interface.Evaluation.ts @@ -0,0 +1,43 @@ +// Evaluation Related Interfaces +export interface IDataset { + id: string + name: string + createdDate: Date + updatedDate: Date +} +export interface IDatasetRow { + id: string + datasetId: string + input: string + output: string + updatedDate: Date +} + +export enum EvaluationStatus { + PENDING = 'pending', + COMPLETED = 'completed' +} +export interface IEvaluation { + id: string + name: string + chatflowId: string + chatflowName: string + datasetId: string + datasetName: string + evaluationType: string + average_metrics: string + status: string + runDate: Date +} + +export interface IEvaluationRun { + id: string + evaluationId: string + input: string + expectedOutput: string + actualOutput: string + metrics: string + runDate: Date + reasoning: string + score: number +} diff --git a/packages/components/src/Interface.ts b/packages/components/src/Interface.ts index 6fd3d884d..daa8ef79e 100644 --- a/packages/components/src/Interface.ts +++ b/packages/components/src/Interface.ts @@ -134,6 +134,7 @@ export interface INodeProperties { documentation?: string color?: string hint?: string + warning?: string } export interface INode extends INodeProperties { @@ -414,11 +415,14 @@ export interface IVisionChatModal { revertToOriginalModel(): void setMultiModalOption(multiModalOption: IMultiModalOption): void } + export interface IStateWithMessages extends ICommonObject { messages: BaseMessage[] [key: string]: any } +export * from './Interface.Evaluation' + export interface IServerSideEventStreamer { streamStartEvent(chatId: string, data: any): void streamTokenEvent(chatId: string, data: string): void @@ -438,6 +442,9 @@ export interface IServerSideEventStreamer { streamAbortEvent(chatId: string): void streamEndEvent(chatId: string): void streamUsageMetadataEvent(chatId: string, data: any): void + streamTTSStartEvent(chatId: string, chatMessageId: string, format: string): void + 
streamTTSDataEvent(chatId: string, chatMessageId: string, audioChunk: string): void + streamTTSEndEvent(chatId: string, chatMessageId: string): void } export enum FollowUpPromptProvider { diff --git a/packages/components/src/MetricsLogger.ts b/packages/components/src/MetricsLogger.ts new file mode 100644 index 000000000..662fc9e27 --- /dev/null +++ b/packages/components/src/MetricsLogger.ts @@ -0,0 +1,135 @@ +import { BaseTracer, Run } from '@langchain/core/tracers/base' +import { Logger } from 'winston' +import { AgentRun, elapsed, tryJsonStringify } from './handler' + +export class MetricsLogger extends BaseTracer { + name = 'console_callback_handler' as const + logger: Logger + orgId?: string + + protected persistRun(_run: Run) { + return Promise.resolve() + } + + constructor(logger: Logger, orgId?: string) { + super() + this.logger = logger + this.orgId = orgId + } + + // utility methods + + getParents(run: Run) { + const parents: Run[] = [] + let currentRun = run + while (currentRun.parent_run_id) { + const parent = this.runMap.get(currentRun.parent_run_id) + if (parent) { + parents.push(parent) + currentRun = parent + } else { + break + } + } + return parents + } + + getBreadcrumbs(run: Run) { + const parents = this.getParents(run).reverse() + const string = [...parents, run] + .map((parent) => { + const name = `${parent.execution_order}:${parent.run_type}:${parent.name}` + return name + }) + .join(' > ') + return string + } + + // logging methods + + onChainStart(run: Run) { + const crumbs = this.getBreadcrumbs(run) + this.logger.verbose( + `[${this.orgId}]: [chain/start] [${crumbs}] Entering Chain run with input: ${tryJsonStringify(run.inputs, '[inputs]')}` + ) + } + + onChainEnd(run: Run) { + const crumbs = this.getBreadcrumbs(run) + this.logger.verbose( + `[${this.orgId}]: [chain/end] [${crumbs}] [${elapsed(run)}] Exiting Chain run with output: ${tryJsonStringify( + run.outputs, + '[outputs]' + )}` + ) + } + + onChainError(run: Run) { + const crumbs = 
this.getBreadcrumbs(run) + this.logger.verbose( + `[${this.orgId}]: [chain/error] [${crumbs}] [${elapsed(run)}] Chain run errored with error: ${tryJsonStringify( + run.error, + '[error]' + )}` + ) + } + + onLLMStart(run: Run) { + const crumbs = this.getBreadcrumbs(run) + const inputs = 'prompts' in run.inputs ? { prompts: (run.inputs.prompts as string[]).map((p) => p.trim()) } : run.inputs + this.logger.verbose(`[${this.orgId}]: [llm/start] [${crumbs}] Entering LLM run with input: ${tryJsonStringify(inputs, '[inputs]')}`) + } + + onLLMEnd(run: Run) { + const crumbs = this.getBreadcrumbs(run) + this.logger.verbose( + `[${this.orgId}]: [llm/end] [${crumbs}] [${elapsed(run)}] Exiting LLM run with output: ${tryJsonStringify( + run.outputs, + '[response]' + )}` + ) + } + + onLLMError(run: Run) { + const crumbs = this.getBreadcrumbs(run) + this.logger.verbose( + `[${this.orgId}]: [llm/error] [${crumbs}] [${elapsed(run)}] LLM run errored with error: ${tryJsonStringify( + run.error, + '[error]' + )}` + ) + } + + onToolStart(run: Run) { + const crumbs = this.getBreadcrumbs(run) + this.logger.verbose(`[${this.orgId}]: [tool/start] [${crumbs}] Entering Tool run with input: "${run.inputs.input?.trim()}"`) + } + + onToolEnd(run: Run) { + const crumbs = this.getBreadcrumbs(run) + this.logger.verbose( + `[${this.orgId}]: [tool/end] [${crumbs}] [${elapsed(run)}] Exiting Tool run with output: "${run.outputs?.output?.trim()}"` + ) + } + + onToolError(run: Run) { + const crumbs = this.getBreadcrumbs(run) + this.logger.verbose( + `[${this.orgId}]: [tool/error] [${crumbs}] [${elapsed(run)}] Tool run errored with error: ${tryJsonStringify( + run.error, + '[error]' + )}` + ) + } + + onAgentAction(run: Run) { + const agentRun = run as AgentRun + const crumbs = this.getBreadcrumbs(run) + this.logger.verbose( + `[${this.orgId}]: [agent/action] [${crumbs}] Agent selected action: ${tryJsonStringify( + agentRun.actions[agentRun.actions.length - 1], + '[action]' + )}` + ) + } +} diff --git 
a/packages/components/src/agentflowv2Generator.ts b/packages/components/src/agentflowv2Generator.ts index c5765f521..24d70c623 100644 --- a/packages/components/src/agentflowv2Generator.ts +++ b/packages/components/src/agentflowv2Generator.ts @@ -76,10 +76,10 @@ interface AgentToolConfig { interface NodeInputs { agentTools?: AgentToolConfig[] - selectedTool?: string + toolAgentflowSelectedTool?: string toolInputArgs?: Record[] - selectedToolConfig?: { - selectedTool: string + toolAgentflowSelectedToolConfig?: { + toolAgentflowSelectedTool: string } [key: string]: any } @@ -284,10 +284,10 @@ Now, select the ONLY tool that is needed to achieve the given task. You must onl if (Array.isArray(tools) && tools.length > 0) { selectedTools.push(...tools) - node.data.inputs.selectedTool = tools[0] + node.data.inputs.toolAgentflowSelectedTool = tools[0] node.data.inputs.toolInputArgs = [] - node.data.inputs.selectedToolConfig = { - selectedTool: tools[0] + node.data.inputs.toolAgentflowSelectedToolConfig = { + toolAgentflowSelectedTool: tools[0] } } } @@ -308,7 +308,7 @@ const _generateSelectedTools = async (config: Record, question: str const model = (await newToolNodeInstance.init(config.selectedChatModel, '', options)) as BaseChatModel // Create a parser to validate the output - const parser = StructuredOutputParser.fromZodSchema(ToolType) + const parser = StructuredOutputParser.fromZodSchema(ToolType as any) // Generate JSON schema from our Zod schema const formatInstructions = parser.getFormatInstructions() @@ -364,7 +364,7 @@ const generateNodesEdges = async (config: Record, question: string, const model = (await newToolNodeInstance.init(config.selectedChatModel, '', options)) as BaseChatModel // Create a parser to validate the output - const parser = StructuredOutputParser.fromZodSchema(NodesEdgesType) + const parser = StructuredOutputParser.fromZodSchema(NodesEdgesType as any) // Generate JSON schema from our Zod schema const formatInstructions = 
parser.getFormatInstructions() @@ -585,42 +585,87 @@ const _showHideOperation = (nodeData: Record, inputParam: Record groundValue.includes(val)) + if (displayType === 'show' && !hasIntersection) { + inputParam.display = false + } + if (displayType === 'hide' && hasIntersection) { + inputParam.display = false + } + } else if (typeof comparisonValue === 'string') { + // comparisonValue is string, groundValue is array - check if array contains the string + const matchFound = groundValue.some((val) => comparisonValue === val || new RegExp(comparisonValue).test(val)) + if (displayType === 'show' && !matchFound) { + inputParam.display = false + } + if (displayType === 'hide' && matchFound) { + inputParam.display = false + } + } else if (typeof comparisonValue === 'boolean' || typeof comparisonValue === 'number') { + // For boolean/number comparison with array, check if array contains the value + const matchFound = groundValue.includes(comparisonValue) + if (displayType === 'show' && !matchFound) { + inputParam.display = false + } + if (displayType === 'hide' && matchFound) { + inputParam.display = false + } + } else if (typeof comparisonValue === 'object') { + // For object comparison with array, use deep equality check + const matchFound = groundValue.some((val) => isEqual(comparisonValue, val)) + if (displayType === 'show' && !matchFound) { + inputParam.display = false + } + if (displayType === 'hide' && matchFound) { + inputParam.display = false + } } - if (displayType === 'hide' && comparisonValue.includes(groundValue)) { - inputParam.display = false - } - } else if (typeof comparisonValue === 'string') { - if (displayType === 'show' && !(comparisonValue === groundValue || new RegExp(comparisonValue).test(groundValue))) { - inputParam.display = false - } - if (displayType === 'hide' && (comparisonValue === groundValue || new RegExp(comparisonValue).test(groundValue))) { - inputParam.display = false - } - } else if (typeof comparisonValue === 'boolean') { - if 
(displayType === 'show' && comparisonValue !== groundValue) { - inputParam.display = false - } - if (displayType === 'hide' && comparisonValue === groundValue) { - inputParam.display = false - } - } else if (typeof comparisonValue === 'object') { - if (displayType === 'show' && !isEqual(comparisonValue, groundValue)) { - inputParam.display = false - } - if (displayType === 'hide' && isEqual(comparisonValue, groundValue)) { - inputParam.display = false - } - } else if (typeof comparisonValue === 'number') { - if (displayType === 'show' && comparisonValue !== groundValue) { - inputParam.display = false - } - if (displayType === 'hide' && comparisonValue === groundValue) { - inputParam.display = false + } else { + // Original logic for non-array groundValue + if (Array.isArray(comparisonValue)) { + if (displayType === 'show' && !comparisonValue.includes(groundValue)) { + inputParam.display = false + } + if (displayType === 'hide' && comparisonValue.includes(groundValue)) { + inputParam.display = false + } + } else if (typeof comparisonValue === 'string') { + if (displayType === 'show' && !(comparisonValue === groundValue || new RegExp(comparisonValue).test(groundValue))) { + inputParam.display = false + } + if (displayType === 'hide' && (comparisonValue === groundValue || new RegExp(comparisonValue).test(groundValue))) { + inputParam.display = false + } + } else if (typeof comparisonValue === 'boolean') { + if (displayType === 'show' && comparisonValue !== groundValue) { + inputParam.display = false + } + if (displayType === 'hide' && comparisonValue === groundValue) { + inputParam.display = false + } + } else if (typeof comparisonValue === 'object') { + if (displayType === 'show' && !isEqual(comparisonValue, groundValue)) { + inputParam.display = false + } + if (displayType === 'hide' && isEqual(comparisonValue, groundValue)) { + inputParam.display = false + } + } else if (typeof comparisonValue === 'number') { + if (displayType === 'show' && comparisonValue !== 
groundValue) { + inputParam.display = false + } + if (displayType === 'hide' && comparisonValue === groundValue) { + inputParam.display = false + } } } }) diff --git a/packages/components/src/agents.ts b/packages/components/src/agents.ts index 0bda4021c..cfc439280 100644 --- a/packages/components/src/agents.ts +++ b/packages/components/src/agents.ts @@ -28,6 +28,17 @@ import { getErrorMessage } from './error' export const SOURCE_DOCUMENTS_PREFIX = '\n\n----FLOWISE_SOURCE_DOCUMENTS----\n\n' export const ARTIFACTS_PREFIX = '\n\n----FLOWISE_ARTIFACTS----\n\n' +export const TOOL_ARGS_PREFIX = '\n\n----FLOWISE_TOOL_ARGS----\n\n' + +/** + * Utility function to format tool error messages with parameters for debugging + * @param errorMessage - The base error message + * @param params - The parameters that were passed to the tool + * @returns Formatted error message with tool arguments appended + */ +export const formatToolError = (errorMessage: string, params: any): string => { + return errorMessage + TOOL_ARGS_PREFIX + JSON.stringify(params) +} export type AgentFinish = { returnValues: Record @@ -444,9 +455,19 @@ export class AgentExecutor extends BaseChain { if (typeof toolOutput === 'string' && toolOutput.includes(ARTIFACTS_PREFIX)) { toolOutput = toolOutput.split(ARTIFACTS_PREFIX)[0] } + let toolInput + if (typeof toolOutput === 'string' && toolOutput.includes(TOOL_ARGS_PREFIX)) { + const splitArray = toolOutput.split(TOOL_ARGS_PREFIX) + toolOutput = splitArray[0] + try { + toolInput = JSON.parse(splitArray[1]) + } catch (e) { + console.error('Error parsing tool input from tool') + } + } usedTools.push({ tool: tool.name, - toolInput: action.toolInput as any, + toolInput: toolInput ?? 
(action.toolInput as any), toolOutput }) } else { @@ -502,6 +523,10 @@ export class AgentExecutor extends BaseChain { console.error('Error parsing source documents from tool') } } + if (typeof observation === 'string' && observation.includes(TOOL_ARGS_PREFIX)) { + const observationArray = observation.split(TOOL_ARGS_PREFIX) + observation = observationArray[0] + } return { action, observation: observation ?? '' } }) ) @@ -610,6 +635,10 @@ export class AgentExecutor extends BaseChain { const observationArray = observation.split(ARTIFACTS_PREFIX) observation = observationArray[0] } + if (typeof observation === 'string' && observation.includes(TOOL_ARGS_PREFIX)) { + const observationArray = observation.split(TOOL_ARGS_PREFIX) + observation = observationArray[0] + } } catch (e) { if (e instanceof ToolInputParsingException) { if (this.handleParsingErrors === true) { @@ -992,7 +1021,7 @@ export class JsonOutputToolsParser extends BaseLLMOutputParser const parsedToolCalls = [] if (!toolCalls) { - // @ts-expect-error name and arguemnts are defined by Object.defineProperty + // @ts-expect-error name and arguments are defined by Object.defineProperty const parsedToolCall: ParsedToolCall = { type: 'undefined', args: {} @@ -1018,7 +1047,7 @@ export class JsonOutputToolsParser extends BaseLLMOutputParser const clonedToolCalls = JSON.parse(JSON.stringify(toolCalls)) for (const toolCall of clonedToolCalls) { if (toolCall.function !== undefined) { - // @ts-expect-error name and arguemnts are defined by Object.defineProperty + // @ts-expect-error name and arguments are defined by Object.defineProperty const parsedToolCall: ParsedToolCall = { type: toolCall.function.name, args: JSON.parse(toolCall.function.arguments) diff --git a/packages/components/src/awsToolsUtils.ts b/packages/components/src/awsToolsUtils.ts new file mode 100644 index 000000000..46edafeff --- /dev/null +++ b/packages/components/src/awsToolsUtils.ts @@ -0,0 +1,65 @@ +import { ICommonObject, INodeData } from 
'./Interface' +import { getCredentialData, getCredentialParam } from './utils' + +// AWS Regions constant +export const AWS_REGIONS = [ + { label: 'US East (N. Virginia) - us-east-1', name: 'us-east-1' }, + { label: 'US East (Ohio) - us-east-2', name: 'us-east-2' }, + { label: 'US West (N. California) - us-west-1', name: 'us-west-1' }, + { label: 'US West (Oregon) - us-west-2', name: 'us-west-2' }, + { label: 'Africa (Cape Town) - af-south-1', name: 'af-south-1' }, + { label: 'Asia Pacific (Hong Kong) - ap-east-1', name: 'ap-east-1' }, + { label: 'Asia Pacific (Mumbai) - ap-south-1', name: 'ap-south-1' }, + { label: 'Asia Pacific (Osaka) - ap-northeast-3', name: 'ap-northeast-3' }, + { label: 'Asia Pacific (Seoul) - ap-northeast-2', name: 'ap-northeast-2' }, + { label: 'Asia Pacific (Singapore) - ap-southeast-1', name: 'ap-southeast-1' }, + { label: 'Asia Pacific (Sydney) - ap-southeast-2', name: 'ap-southeast-2' }, + { label: 'Asia Pacific (Tokyo) - ap-northeast-1', name: 'ap-northeast-1' }, + { label: 'Canada (Central) - ca-central-1', name: 'ca-central-1' }, + { label: 'Europe (Frankfurt) - eu-central-1', name: 'eu-central-1' }, + { label: 'Europe (Ireland) - eu-west-1', name: 'eu-west-1' }, + { label: 'Europe (London) - eu-west-2', name: 'eu-west-2' }, + { label: 'Europe (Milan) - eu-south-1', name: 'eu-south-1' }, + { label: 'Europe (Paris) - eu-west-3', name: 'eu-west-3' }, + { label: 'Europe (Stockholm) - eu-north-1', name: 'eu-north-1' }, + { label: 'Middle East (Bahrain) - me-south-1', name: 'me-south-1' }, + { label: 'South America (Sรฃo Paulo) - sa-east-1', name: 'sa-east-1' } +] + +export const DEFAULT_AWS_REGION = 'us-east-1' + +// AWS Credentials interface +export interface AWSCredentials { + accessKeyId: string + secretAccessKey: string + sessionToken?: string +} + +/** + * Get AWS credentials from node data + * @param {INodeData} nodeData - Node data containing credential information + * @param {ICommonObject} options - Options containing 
appDataSource and databaseEntities + * @returns {Promise} - AWS credentials object + */ +export async function getAWSCredentials(nodeData: INodeData, options: ICommonObject): Promise { + const credentialData = await getCredentialData(nodeData.credential ?? '', options) + + const accessKeyId = getCredentialParam('awsKey', credentialData, nodeData) + const secretAccessKey = getCredentialParam('awsSecret', credentialData, nodeData) + const sessionToken = getCredentialParam('awsSession', credentialData, nodeData) + + if (!accessKeyId || !secretAccessKey) { + throw new Error('AWS Access Key ID and Secret Access Key are required') + } + + const credentials: AWSCredentials = { + accessKeyId, + secretAccessKey + } + + if (sessionToken) { + credentials.sessionToken = sessionToken + } + + return credentials +} diff --git a/packages/components/src/followUpPrompts.ts b/packages/components/src/followUpPrompts.ts index ecfcfe825..72014c6d4 100644 --- a/packages/components/src/followUpPrompts.ts +++ b/packages/components/src/followUpPrompts.ts @@ -36,6 +36,7 @@ export const generateFollowUpPrompts = async ( model: providerConfig.modelName, temperature: parseFloat(`${providerConfig.temperature}`) }) + // @ts-ignore const structuredLLM = llm.withStructuredOutput(FollowUpPromptType) const structuredResponse = await structuredLLM.invoke(followUpPromptsPrompt) return structuredResponse @@ -55,7 +56,7 @@ export const generateFollowUpPrompts = async ( temperature: parseFloat(`${providerConfig.temperature}`) }) // use structured output parser because withStructuredOutput is not working - const parser = StructuredOutputParser.fromZodSchema(FollowUpPromptType) + const parser = StructuredOutputParser.fromZodSchema(FollowUpPromptType as any) const formatInstructions = parser.getFormatInstructions() const prompt = PromptTemplate.fromTemplate(` ${providerConfig.prompt} @@ -70,24 +71,13 @@ export const generateFollowUpPrompts = async ( return structuredResponse } case 
FollowUpPromptProvider.GOOGLE_GENAI: { - const llm = new ChatGoogleGenerativeAI({ + const model = new ChatGoogleGenerativeAI({ apiKey: credentialData.googleGenerativeAPIKey, model: providerConfig.modelName, temperature: parseFloat(`${providerConfig.temperature}`) }) - // use structured output parser because withStructuredOutput is not working - const parser = StructuredOutputParser.fromZodSchema(FollowUpPromptType) - const formatInstructions = parser.getFormatInstructions() - const prompt = PromptTemplate.fromTemplate(` - ${providerConfig.prompt} - - {format_instructions} - `) - const chain = prompt.pipe(llm).pipe(parser) - const structuredResponse = await chain.invoke({ - history: apiMessageContent, - format_instructions: formatInstructions - }) + const structuredLLM = model.withStructuredOutput(FollowUpPromptType) + const structuredResponse = await structuredLLM.invoke(followUpPromptsPrompt) return structuredResponse } case FollowUpPromptProvider.MISTRALAI: { @@ -105,8 +95,10 @@ export const generateFollowUpPrompts = async ( const model = new ChatOpenAI({ apiKey: credentialData.openAIApiKey, model: providerConfig.modelName, - temperature: parseFloat(`${providerConfig.temperature}`) + temperature: parseFloat(`${providerConfig.temperature}`), + useResponsesApi: true }) + // @ts-ignore const structuredLLM = model.withStructuredOutput(FollowUpPromptType) const structuredResponse = await structuredLLM.invoke(followUpPromptsPrompt) return structuredResponse diff --git a/packages/components/src/google-utils.ts b/packages/components/src/google-utils.ts new file mode 100644 index 000000000..f868e6699 --- /dev/null +++ b/packages/components/src/google-utils.ts @@ -0,0 +1,29 @@ +import { getCredentialData, getCredentialParam, type ICommonObject, type INodeData } from '.' 
+import type { ChatVertexAIInput, VertexAIInput } from '@langchain/google-vertexai' + +type SupportedAuthOptions = ChatVertexAIInput['authOptions'] | VertexAIInput['authOptions'] + +export const buildGoogleCredentials = async (nodeData: INodeData, options: ICommonObject): Promise => { + const credentialData = await getCredentialData(nodeData.credential ?? '', options) + const googleApplicationCredentialFilePath = getCredentialParam('googleApplicationCredentialFilePath', credentialData, nodeData) + const googleApplicationCredential = getCredentialParam('googleApplicationCredential', credentialData, nodeData) + const projectID = getCredentialParam('projectID', credentialData, nodeData) + + const authOptions: any = {} + if (Object.keys(credentialData).length !== 0) { + if (!googleApplicationCredentialFilePath && !googleApplicationCredential) + throw new Error('Please specify your Google Application Credential') + if (!googleApplicationCredentialFilePath && !googleApplicationCredential) + throw new Error( + 'Error: More than one component has been inputted. 
Please use only one of the following: Google Application Credential File Path or Google Credential JSON Object' + ) + + if (googleApplicationCredentialFilePath && !googleApplicationCredential) authOptions.keyFile = googleApplicationCredentialFilePath + else if (!googleApplicationCredentialFilePath && googleApplicationCredential) + authOptions.credentials = JSON.parse(googleApplicationCredential) + + if (projectID) authOptions.projectId = projectID + } + + return authOptions +} diff --git a/packages/components/src/handler.test.ts b/packages/components/src/handler.test.ts new file mode 100644 index 000000000..333b2cba8 --- /dev/null +++ b/packages/components/src/handler.test.ts @@ -0,0 +1,51 @@ +import { getPhoenixTracer } from './handler' + +jest.mock('@opentelemetry/exporter-trace-otlp-proto', () => { + return { + ProtoOTLPTraceExporter: jest.fn().mockImplementation((args) => { + return { args } + }) + } +}) + +import { OTLPTraceExporter as ProtoOTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-proto' + +describe('URL Handling For Phoenix Tracer', () => { + const apiKey = 'test-api-key' + const projectName = 'test-project-name' + + const makeOptions = (baseUrl: string) => ({ + baseUrl, + apiKey, + projectName, + enableCallback: false + }) + + beforeEach(() => { + jest.clearAllMocks() + }) + + const cases: [string, string][] = [ + ['http://localhost:6006', 'http://localhost:6006/v1/traces'], + ['http://localhost:6006/v1/traces', 'http://localhost:6006/v1/traces'], + ['https://app.phoenix.arize.com', 'https://app.phoenix.arize.com/v1/traces'], + ['https://app.phoenix.arize.com/v1/traces', 'https://app.phoenix.arize.com/v1/traces'], + ['https://app.phoenix.arize.com/s/my-space', 'https://app.phoenix.arize.com/s/my-space/v1/traces'], + ['https://app.phoenix.arize.com/s/my-space/v1/traces', 'https://app.phoenix.arize.com/s/my-space/v1/traces'], + ['https://my-phoenix.com/my-slug', 'https://my-phoenix.com/my-slug/v1/traces'], + 
['https://my-phoenix.com/my-slug/v1/traces', 'https://my-phoenix.com/my-slug/v1/traces'] + ] + + it.each(cases)('baseUrl %s - exporterUrl %s', (input, expected) => { + getPhoenixTracer(makeOptions(input)) + expect(ProtoOTLPTraceExporter).toHaveBeenCalledWith( + expect.objectContaining({ + url: expected, + headers: expect.objectContaining({ + api_key: apiKey, + authorization: `Bearer ${apiKey}` + }) + }) + ) + }) +}) diff --git a/packages/components/src/handler.ts b/packages/components/src/handler.ts index 0da42f479..c7dec69c3 100644 --- a/packages/components/src/handler.ts +++ b/packages/components/src/handler.ts @@ -1,4 +1,5 @@ import { Logger } from 'winston' +import { URL } from 'url' import { v4 as uuidv4 } from 'uuid' import { Client } from 'langsmith' import CallbackHandler from 'langfuse-langchain' @@ -25,6 +26,8 @@ import { AgentAction } from '@langchain/core/agents' import { LunaryHandler } from '@langchain/community/callbacks/handlers/lunary' import { getCredentialData, getCredentialParam, getEnvironmentVariable } from './utils' +import { EvaluationRunTracer } from '../evaluation/EvaluationRunTracer' +import { EvaluationRunTracerLlama } from '../evaluation/EvaluationRunTracerLlama' import { ICommonObject, IDatabaseEntity, INodeData, IServerSideEventStreamer } from './Interface' import { LangWatch, LangWatchSpan, LangWatchTrace, autoconvertTypedValues } from 'langwatch' import { DataSource } from 'typeorm' @@ -32,7 +35,7 @@ import { ChatGenerationChunk } from '@langchain/core/outputs' import { AIMessageChunk, BaseMessageLike } from '@langchain/core/messages' import { Serialized } from '@langchain/core/load/serializable' -interface AgentRun extends Run { +export interface AgentRun extends Run { actions: AgentAction[] } @@ -89,14 +92,27 @@ interface PhoenixTracerOptions { enableCallback?: boolean } -function getPhoenixTracer(options: PhoenixTracerOptions): Tracer | undefined { +export function getPhoenixTracer(options: PhoenixTracerOptions): Tracer | 
undefined { const SEMRESATTRS_PROJECT_NAME = 'openinference.project.name' try { + const parsedURL = new URL(options.baseUrl) + const baseEndpoint = `${parsedURL.protocol}//${parsedURL.host}` + + // Remove trailing slashes + let path = parsedURL.pathname.replace(/\/$/, '') + + // Remove any existing /v1/traces suffix + path = path.replace(/\/v1\/traces$/, '') + + const exporterUrl = `${baseEndpoint}${path}/v1/traces` + const exporterHeaders = { + api_key: options.apiKey || '', + authorization: `Bearer ${options.apiKey || ''}` + } + const traceExporter = new ProtoOTLPTraceExporter({ - url: `${options.baseUrl}/v1/traces`, - headers: { - api_key: options.apiKey - } + url: exporterUrl, + headers: exporterHeaders }) const tracerProvider = new NodeTracerProvider({ resource: new Resource({ @@ -173,7 +189,7 @@ function tryGetJsonSpaces() { } } -function tryJsonStringify(obj: unknown, fallback: string) { +export function tryJsonStringify(obj: unknown, fallback: string) { try { return JSON.stringify(obj, null, tryGetJsonSpaces()) } catch (err) { @@ -181,7 +197,7 @@ function tryJsonStringify(obj: unknown, fallback: string) { } } -function elapsed(run: Run): string { +export function elapsed(run: Run): string { if (!run.end_time) return '' const elapsed = run.end_time - run.start_time if (elapsed < 1000) { @@ -193,14 +209,16 @@ function elapsed(run: Run): string { export class ConsoleCallbackHandler extends BaseTracer { name = 'console_callback_handler' as const logger: Logger + orgId?: string protected persistRun(_run: Run) { return Promise.resolve() } - constructor(logger: Logger) { + constructor(logger: Logger, orgId?: string) { super() this.logger = logger + this.orgId = orgId if (getEnvironmentVariable('DEBUG') === 'true') { logger.level = getEnvironmentVariable('LOG_LEVEL') ?? 
'info' } @@ -235,57 +253,76 @@ export class ConsoleCallbackHandler extends BaseTracer { onChainStart(run: Run) { const crumbs = this.getBreadcrumbs(run) - this.logger.verbose(`[chain/start] [${crumbs}] Entering Chain run with input: ${tryJsonStringify(run.inputs, '[inputs]')}`) + this.logger.verbose( + `[${this.orgId}]: [chain/start] [${crumbs}] Entering Chain run with input: ${tryJsonStringify(run.inputs, '[inputs]')}` + ) } onChainEnd(run: Run) { const crumbs = this.getBreadcrumbs(run) this.logger.verbose( - `[chain/end] [${crumbs}] [${elapsed(run)}] Exiting Chain run with output: ${tryJsonStringify(run.outputs, '[outputs]')}` + `[${this.orgId}]: [chain/end] [${crumbs}] [${elapsed(run)}] Exiting Chain run with output: ${tryJsonStringify( + run.outputs, + '[outputs]' + )}` ) } onChainError(run: Run) { const crumbs = this.getBreadcrumbs(run) this.logger.verbose( - `[chain/error] [${crumbs}] [${elapsed(run)}] Chain run errored with error: ${tryJsonStringify(run.error, '[error]')}` + `[${this.orgId}]: [chain/error] [${crumbs}] [${elapsed(run)}] Chain run errored with error: ${tryJsonStringify( + run.error, + '[error]' + )}` ) } onLLMStart(run: Run) { const crumbs = this.getBreadcrumbs(run) const inputs = 'prompts' in run.inputs ? 
{ prompts: (run.inputs.prompts as string[]).map((p) => p.trim()) } : run.inputs - this.logger.verbose(`[llm/start] [${crumbs}] Entering LLM run with input: ${tryJsonStringify(inputs, '[inputs]')}`) + this.logger.verbose(`[${this.orgId}]: [llm/start] [${crumbs}] Entering LLM run with input: ${tryJsonStringify(inputs, '[inputs]')}`) } onLLMEnd(run: Run) { const crumbs = this.getBreadcrumbs(run) this.logger.verbose( - `[llm/end] [${crumbs}] [${elapsed(run)}] Exiting LLM run with output: ${tryJsonStringify(run.outputs, '[response]')}` + `[${this.orgId}]: [llm/end] [${crumbs}] [${elapsed(run)}] Exiting LLM run with output: ${tryJsonStringify( + run.outputs, + '[response]' + )}` ) } onLLMError(run: Run) { const crumbs = this.getBreadcrumbs(run) this.logger.verbose( - `[llm/error] [${crumbs}] [${elapsed(run)}] LLM run errored with error: ${tryJsonStringify(run.error, '[error]')}` + `[${this.orgId}]: [llm/error] [${crumbs}] [${elapsed(run)}] LLM run errored with error: ${tryJsonStringify( + run.error, + '[error]' + )}` ) } onToolStart(run: Run) { const crumbs = this.getBreadcrumbs(run) - this.logger.verbose(`[tool/start] [${crumbs}] Entering Tool run with input: "${run.inputs.input?.trim()}"`) + this.logger.verbose(`[${this.orgId}]: [tool/start] [${crumbs}] Entering Tool run with input: "${run.inputs.input?.trim()}"`) } onToolEnd(run: Run) { const crumbs = this.getBreadcrumbs(run) - this.logger.verbose(`[tool/end] [${crumbs}] [${elapsed(run)}] Exiting Tool run with output: "${run.outputs?.output?.trim()}"`) + this.logger.verbose( + `[${this.orgId}]: [tool/end] [${crumbs}] [${elapsed(run)}] Exiting Tool run with output: "${run.outputs?.output?.trim()}"` + ) } onToolError(run: Run) { const crumbs = this.getBreadcrumbs(run) this.logger.verbose( - `[tool/error] [${crumbs}] [${elapsed(run)}] Tool run errored with error: ${tryJsonStringify(run.error, '[error]')}` + `[${this.orgId}]: [tool/error] [${crumbs}] [${elapsed(run)}] Tool run errored with error: ${tryJsonStringify( + 
run.error, + '[error]' + )}` ) } @@ -293,7 +330,7 @@ export class ConsoleCallbackHandler extends BaseTracer { const agentRun = run as AgentRun const crumbs = this.getBreadcrumbs(run) this.logger.verbose( - `[agent/action] [${crumbs}] Agent selected action: ${tryJsonStringify( + `[${this.orgId}]: [agent/action] [${crumbs}] Agent selected action: ${tryJsonStringify( agentRun.actions[agentRun.actions.length - 1], '[action]' )}` @@ -396,6 +433,7 @@ export class CustomChainHandler extends BaseCallbackHandler { } } +/*TODO - Add llamaIndex tracer to non evaluation runs*/ class ExtendedLunaryHandler extends LunaryHandler { chatId: string appDataSource: DataSource @@ -550,6 +588,13 @@ export const additionalCallbacks = async (nodeData: INodeData, options: ICommonO const handler = new ExtendedLunaryHandler(lunaryFields) callbacks.push(handler) + } else if (provider === 'evaluation') { + if (options.llamaIndex) { + new EvaluationRunTracerLlama(options.evaluationRunId) + } else { + const evaluationHandler = new EvaluationRunTracer(options.evaluationRunId) + callbacks.push(evaluationHandler) + } } else if (provider === 'langWatch') { const langWatchApiKey = getCredentialParam('langWatchApiKey', credentialData, nodeData) const langWatchEndpoint = getCredentialParam('langWatchEndpoint', credentialData, nodeData) @@ -560,6 +605,15 @@ export const additionalCallbacks = async (nodeData: INodeData, options: ICommonO }) const trace = langwatch.getTrace() + + if (nodeData?.inputs?.analytics?.langWatch) { + trace.update({ + metadata: { + ...nodeData?.inputs?.analytics?.langWatch + } + }) + } + callbacks.push(trace.getLangChainCallback()) } else if (provider === 'arize') { const arizeApiKey = getCredentialParam('arizeApiKey', credentialData, nodeData) @@ -1720,7 +1774,7 @@ export class AnalyticHandler { } if (Object.prototype.hasOwnProperty.call(this.handlers, 'lunary')) { - const toolEventId: string = this.handlers['lunary'].llmEvent[returnIds['lunary'].toolEvent] + const toolEventId: 
string = this.handlers['lunary'].toolEvent[returnIds['lunary'].toolEvent] const monitor = this.handlers['lunary'].client if (monitor && toolEventId) { diff --git a/packages/components/src/httpSecurity.ts b/packages/components/src/httpSecurity.ts new file mode 100644 index 000000000..d729f8da5 --- /dev/null +++ b/packages/components/src/httpSecurity.ts @@ -0,0 +1,221 @@ +import * as ipaddr from 'ipaddr.js' +import dns from 'dns/promises' +import axios, { AxiosRequestConfig, AxiosResponse } from 'axios' +import fetch, { RequestInit, Response } from 'node-fetch' + +/** + * Checks if an IP address is in the deny list + * @param ip - IP address to check + * @param denyList - Array of denied IP addresses/CIDR ranges + * @throws Error if IP is in deny list + */ +export function isDeniedIP(ip: string, denyList: string[]): void { + const parsedIp = ipaddr.parse(ip) + for (const entry of denyList) { + if (entry.includes('/')) { + try { + const [range, _] = entry.split('/') + const parsedRange = ipaddr.parse(range) + if (parsedIp.kind() === parsedRange.kind()) { + if (parsedIp.match(ipaddr.parseCIDR(entry))) { + throw new Error('Access to this host is denied by policy.') + } + } + } catch (error) { + throw new Error(`isDeniedIP: ${error}`) + } + } else if (ip === entry) { + throw new Error('Access to this host is denied by policy.') + } + } +} + +/** + * Checks if a URL is allowed based on HTTP_DENY_LIST environment variable + * @param url - URL to check + * @throws Error if URL hostname resolves to a denied IP + */ +export async function checkDenyList(url: string): Promise { + const httpDenyListString: string | undefined = process.env.HTTP_DENY_LIST + if (!httpDenyListString) return + + const httpDenyList = httpDenyListString.split(',').map((ip) => ip.trim()) + const urlObj = new URL(url) + const hostname = urlObj.hostname + + if (ipaddr.isValid(hostname)) { + isDeniedIP(hostname, httpDenyList) + } else { + const addresses = await dns.lookup(hostname, { all: true }) + for 
(const address of addresses) { + isDeniedIP(address.address, httpDenyList) + } + } +} + +/** + * Makes a secure HTTP request that validates all URLs in redirect chains against the deny list + * @param config - Axios request configuration + * @param maxRedirects - Maximum number of redirects to follow (default: 5) + * @returns Promise + * @throws Error if any URL in the redirect chain is denied + */ +export async function secureAxiosRequest(config: AxiosRequestConfig, maxRedirects: number = 5): Promise { + let currentUrl = config.url + let redirectCount = 0 + let currentConfig = { ...config, maxRedirects: 0 } // Disable automatic redirects + + // Validate the initial URL + if (currentUrl) { + await checkDenyList(currentUrl) + } + + while (redirectCount <= maxRedirects) { + try { + // Update the URL in config for subsequent requests + currentConfig.url = currentUrl + + const response = await axios(currentConfig) + + // If it's a successful response (not a redirect), return it + if (response.status < 300 || response.status >= 400) { + return response + } + + // Handle redirect + const location = response.headers.location + if (!location) { + // No location header, but it's a redirect status - return the response + return response + } + + redirectCount++ + + if (redirectCount > maxRedirects) { + throw new Error('Too many redirects') + } + + // Resolve the redirect URL (handle relative URLs) + const redirectUrl = new URL(location, currentUrl).toString() + + // Validate the redirect URL against the deny list + await checkDenyList(redirectUrl) + + // Update current URL for next iteration + currentUrl = redirectUrl + + // For redirects, we only need to preserve certain headers and change method if needed + if (response.status === 301 || response.status === 302 || response.status === 303) { + // For 303, or when redirecting POST requests, change to GET + if ( + response.status === 303 || + (currentConfig.method && ['POST', 'PUT', 
'PATCH'].includes(currentConfig.method.toUpperCase())) + ) { + currentConfig.method = 'GET' + delete currentConfig.data + } + } + } catch (error) { + // If it's not a redirect-related error from axios, propagate it + if (error.response && error.response.status >= 300 && error.response.status < 400) { + // This is a redirect response that axios couldn't handle automatically + // Continue with our manual redirect handling + const response = error.response + const location = response.headers.location + + if (!location) { + return response + } + + redirectCount++ + + if (redirectCount > maxRedirects) { + throw new Error('Too many redirects') + } + + const redirectUrl = new URL(location, currentUrl).toString() + await checkDenyList(redirectUrl) + currentUrl = redirectUrl + + // Handle method changes for redirects + if (response.status === 301 || response.status === 302 || response.status === 303) { + if ( + response.status === 303 || + (currentConfig.method && ['POST', 'PUT', 'PATCH'].includes(currentConfig.method.toUpperCase())) + ) { + currentConfig.method = 'GET' + delete currentConfig.data + } + } + continue + } + + // For other errors, re-throw + throw error + } + } + + throw new Error('Too many redirects') +} + +/** + * Makes a secure fetch request that validates all URLs in redirect chains against the deny list + * @param url - URL to fetch + * @param init - Fetch request options + * @param maxRedirects - Maximum number of redirects to follow (default: 5) + * @returns Promise + * @throws Error if any URL in the redirect chain is denied + */ +export async function secureFetch(url: string, init?: RequestInit, maxRedirects: number = 5): Promise { + let currentUrl = url + let redirectCount = 0 + let currentInit = { ...init, redirect: 'manual' as const } // Disable automatic redirects + + // Validate the initial URL + await checkDenyList(currentUrl) + + while (redirectCount <= maxRedirects) { + const response = await fetch(currentUrl, currentInit) + + // If it's a 
successful response (not a redirect), return it + if (response.status < 300 || response.status >= 400) { + return response + } + + // Handle redirect + const location = response.headers.get('location') + if (!location) { + // No location header, but it's a redirect status - return the response + return response + } + + redirectCount++ + + if (redirectCount > maxRedirects) { + throw new Error('Too many redirects') + } + + // Resolve the redirect URL (handle relative URLs) + const redirectUrl = new URL(location, currentUrl).toString() + + // Validate the redirect URL against the deny list + await checkDenyList(redirectUrl) + + // Update current URL for next iteration + currentUrl = redirectUrl + + // Handle method changes for redirects according to HTTP specs + if (response.status === 301 || response.status === 302 || response.status === 303) { + // For 303, or when redirecting POST/PUT/PATCH requests, change to GET + if (response.status === 303 || (currentInit.method && ['POST', 'PUT', 'PATCH'].includes(currentInit.method.toUpperCase()))) { + currentInit = { + ...currentInit, + method: 'GET', + body: undefined + } + } + } + } + + throw new Error('Too many redirects') +} diff --git a/packages/components/src/index.ts b/packages/components/src/index.ts index 2944bc320..8003d9d3e 100644 --- a/packages/components/src/index.ts +++ b/packages/components/src/index.ts @@ -7,8 +7,11 @@ dotenv.config({ path: envPath, override: true }) export * from './Interface' export * from './utils' export * from './speechToText' +export * from './textToSpeech' export * from './storageUtils' export * from './handler' +export * from '../evaluation/EvaluationRunner' export * from './followUpPrompts' export * from './validator' export * from './agentflowv2Generator' +export * from './httpSecurity' diff --git a/packages/components/src/modelLoader.ts b/packages/components/src/modelLoader.ts index be588d573..dc728634b 100644 --- a/packages/components/src/modelLoader.ts +++ 
b/packages/components/src/modelLoader.ts @@ -1,9 +1,7 @@ -import { INodeOptionsValue } from './Interface' import axios from 'axios' import * as fs from 'fs' import * as path from 'path' - -const MASTER_MODEL_LIST = 'https://raw.githubusercontent.com/FlowiseAI/Flowise/main/packages/components/models.json' +import { INodeOptionsValue } from './Interface' export enum MODEL_TYPE { CHAT = 'chat', @@ -31,49 +29,62 @@ const isValidUrl = (urlString: string) => { return url.protocol === 'http:' || url.protocol === 'https:' } -const getModelConfig = async (category: MODEL_TYPE, name: string) => { - const modelFile = process.env.MODEL_LIST_CONFIG_JSON || MASTER_MODEL_LIST - - if (!modelFile) { - throw new Error('MODEL_LIST_CONFIG_JSON not set') - } - if (isValidUrl(modelFile)) { - try { +/** + * Load the raw model file from either a URL or a local file + * If any of the loading fails, fallback to the default models.json file on disk + */ +const getRawModelFile = async () => { + const modelFile = + process.env.MODEL_LIST_CONFIG_JSON ?? 
'https://raw.githubusercontent.com/FlowiseAI/Flowise/main/packages/components/models.json' + try { + if (isValidUrl(modelFile)) { const resp = await axios.get(modelFile) if (resp.status === 200 && resp.data) { - const models = resp.data - const categoryModels = models[category] - return categoryModels.find((model: INodeOptionsValue) => model.name === name) + return resp.data } else { throw new Error('Error fetching model list') } - } catch (e) { - const models = await fs.promises.readFile(getModelsJSONPath(), 'utf8') + } else if (fs.existsSync(modelFile)) { + const models = await fs.promises.readFile(modelFile, 'utf8') if (models) { - const categoryModels = JSON.parse(models)[category] - return categoryModels.find((model: INodeOptionsValue) => model.name === name) + return JSON.parse(models) } - return {} } - } else { - try { - if (fs.existsSync(modelFile)) { - const models = await fs.promises.readFile(modelFile, 'utf8') - if (models) { - const categoryModels = JSON.parse(models)[category] - return categoryModels.find((model: INodeOptionsValue) => model.name === name) + throw new Error('Model file does not exist or is empty') + } catch (e) { + const models = await fs.promises.readFile(getModelsJSONPath(), 'utf8') + if (models) { + return JSON.parse(models) + } + return {} + } +} + +const getModelConfig = async (category: MODEL_TYPE, name: string) => { + const models = await getRawModelFile() + + const categoryModels = models[category] + return categoryModels.find((model: INodeOptionsValue) => model.name === name) +} + +export const getModelConfigByModelName = async (category: MODEL_TYPE, provider: string | undefined, name: string | undefined) => { + const models = await getRawModelFile() + + const categoryModels = models[category] + return getSpecificModelFromCategory(categoryModels, provider, name) +} + +const getSpecificModelFromCategory = (categoryModels: any, provider: string | undefined, name: string | undefined) => { + for (const cm of categoryModels) { + if 
(cm.models && cm.name.toLowerCase() === provider?.toLowerCase()) { + for (const m of cm.models) { + if (m.name === name) { + return m } } - return {} - } catch (e) { - const models = await fs.promises.readFile(getModelsJSONPath(), 'utf8') - if (models) { - const categoryModels = JSON.parse(models)[category] - return categoryModels.find((model: INodeOptionsValue) => model.name === name) - } - return {} } } + return undefined } export const getModels = async (category: MODEL_TYPE, name: string) => { diff --git a/packages/components/src/multiModalUtils.ts b/packages/components/src/multiModalUtils.ts index 412361aa6..b048e8a91 100644 --- a/packages/components/src/multiModalUtils.ts +++ b/packages/components/src/multiModalUtils.ts @@ -16,7 +16,7 @@ export const addImagesToMessages = async ( for (const upload of imageUploads) { let bf = upload.data if (upload.type == 'stored-file') { - const contents = await getFileFromStorage(upload.name, options.chatflowid, options.chatId) + const contents = await getFileFromStorage(upload.name, options.orgId, options.chatflowid, options.chatId) // as the image is stored in the server, read the file and convert it to base64 bf = 'data:' + upload.mime + ';base64,' + contents.toString('base64') diff --git a/packages/components/src/secureZodParser.ts b/packages/components/src/secureZodParser.ts new file mode 100644 index 000000000..544a8f755 --- /dev/null +++ b/packages/components/src/secureZodParser.ts @@ -0,0 +1,678 @@ +import { z } from 'zod' + +/** + * This parser safely handles Zod schema strings without allowing arbitrary code execution + */ +export class SecureZodSchemaParser { + private static readonly ALLOWED_TYPES = [ + 'string', + 'number', + 'int', + 'boolean', + 'date', + 'object', + 'array', + 'enum', + 'optional', + 'max', + 'min', + 'describe', + 'default' + ] + + /** + * Safely parse a Zod schema string into a Zod schema object + * @param schemaString The Zod schema as a string (e.g., "z.object({name: z.string()})") + * 
@returns A Zod schema object + * @throws Error if the schema is invalid or contains unsafe patterns + */ + static parseZodSchema(schemaString: string): z.ZodTypeAny { + try { + // Remove comments and normalize whitespace + const cleanedSchema = this.cleanSchemaString(schemaString) + + // Parse the schema structure + const parsed = this.parseSchemaStructure(cleanedSchema) + + // Build the Zod schema securely + return this.buildZodSchema(parsed) + } catch (error) { + throw new Error(`Failed to parse Zod schema: ${error.message}`) + } + } + + private static cleanSchemaString(schema: string): string { + // Remove single-line comments + schema = schema.replace(/\/\/.*$/gm, '') + + // Remove multi-line comments + schema = schema.replace(/\/\*[\s\S]*?\*\//g, '') + + // Normalize whitespace + schema = schema.replace(/\s+/g, ' ').trim() + + return schema + } + + private static parseSchemaStructure(schema: string): any { + // This is a simplified parser that handles common Zod patterns safely + // It does NOT use eval/Function and only handles predefined safe patterns + + if (!schema.startsWith('z.object(')) { + throw new Error('Schema must start with z.object()') + } + + // Extract the object content + const objectMatch = schema.match(/z\.object\(\s*\{([\s\S]*)\}\s*\)/) + if (!objectMatch) { + throw new Error('Invalid z.object() syntax') + } + + const objectContent = objectMatch[1] + return this.parseObjectProperties(objectContent) + } + + private static parseObjectProperties(content: string): Record { + const properties: Record = {} + + // Split by comma, but handle nested structures + const props = this.splitProperties(content) + + for (const prop of props) { + const [key, value] = this.parseProperty(prop) + if (key && value) { + properties[key] = value + } + } + + return properties + } + + private static splitProperties(content: string): string[] { + const properties: string[] = [] + let current = '' + let depth = 0 + let inString = false + let stringChar = '' + + for 
(let i = 0; i < content.length; i++) { + const char = content[i] + + if (!inString && (char === '"' || char === "'")) { + inString = true + stringChar = char + } else if (inString && char === stringChar && content[i - 1] !== '\\') { + inString = false + } else if (!inString) { + if (char === '(' || char === '[' || char === '{') { + depth++ + } else if (char === ')' || char === ']' || char === '}') { + depth-- + } else if (char === ',' && depth === 0) { + properties.push(current.trim()) + current = '' + continue + } + } + + current += char + } + + if (current.trim()) { + properties.push(current.trim()) + } + + return properties + } + + private static parseProperty(prop: string): [string | null, any] { + const colonIndex = prop.indexOf(':') + if (colonIndex === -1) return [null, null] + + const key = prop.substring(0, colonIndex).trim().replace(/['"]/g, '') + const value = prop.substring(colonIndex + 1).trim() + + return [key, this.parseZodType(value)] + } + + private static parseZodType(typeStr: string): any { + // Check if this is a nested object (not in an array) + if (typeStr.startsWith('z.object(') && !typeStr.startsWith('z.array(')) { + // Check if there are modifiers after the object + const objectWithModifiers = this.extractObjectWithModifiers(typeStr) + if (objectWithModifiers.hasModifiers) { + const objectMatch = objectWithModifiers.objectPart.match(/z\.object\(\s*\{([\s\S]*)\}\s*\)/) + if (!objectMatch) { + throw new Error('Invalid object syntax') + } + + const objectContent = objectMatch[1] + const objectProperties = this.parseObjectProperties(objectContent) + + return { + isNestedObject: true, + objectSchema: objectProperties, + modifiers: objectWithModifiers.modifiers + } + } + + // Original code for objects without modifiers + const objectMatch = typeStr.match(/z\.object\(\s*\{([\s\S]*)\}\s*\)/) + if (!objectMatch) { + throw new Error('Invalid object syntax') + } + + const objectContent = objectMatch[1] + const objectProperties = 
this.parseObjectProperties(objectContent) + + return { + isNestedObject: true, + objectSchema: objectProperties + } + } + + // Check if this is any kind of array + if (typeStr.startsWith('z.array(')) { + // Check if there are modifiers after the array + const arrayWithModifiers = this.extractArrayWithModifiers(typeStr) + if (arrayWithModifiers.hasModifiers) { + const arrayResult = this.parseArray(arrayWithModifiers.arrayPart) + // Convert array result to have modifiers + return { + ...arrayResult, + modifiers: arrayWithModifiers.modifiers + } + } + return this.parseArray(typeStr) + } + + const type: { base: string; modifiers: any[]; baseArgs?: any[] } = { base: '', modifiers: [] } + + // Handle chained methods like z.string().max(500).optional() + const parts = typeStr.split('.') + + for (let i = 0; i < parts.length; i++) { + const part = parts[i].trim() + + if (i === 0) { + // First part should be 'z' + if (part !== 'z') { + throw new Error(`Expected 'z' but got '${part}'`) + } + continue + } + + if (i === 1) { + // Second part is the base type + const baseMatch = part.match(/^(\w+)(\(.*\))?$/) + if (!baseMatch) { + throw new Error(`Invalid base type: ${part}`) + } + + type.base = baseMatch[1] + if (baseMatch[2]) { + // Parse arguments for base type (e.g., enum values) + const args = this.parseArguments(baseMatch[2]) + type.baseArgs = args + } + } else { + // Subsequent parts are modifiers + const modMatch = part.match(/^(\w+)(\(.*\))?$/) + if (!modMatch) { + throw new Error(`Invalid modifier: ${part}`) + } + + const modName = modMatch[1] + const modArgs = modMatch[2] ? 
this.parseArguments(modMatch[2]) : [] + + type.modifiers.push({ name: modName, args: modArgs }) + } + } + + return type + } + + private static parseArray(typeStr: string): any { + // Extract the content inside array() + const arrayContentMatch = typeStr.match(/z\.array\(\s*([\s\S]*)\s*\)$/) + if (!arrayContentMatch) { + throw new Error('Invalid array syntax') + } + + const arrayContent = arrayContentMatch[1].trim() + + // Parse the object inside the array + if (arrayContent.startsWith('z.object(')) { + // Extract object content + const objectMatch = arrayContent.match(/z\.object\(\s*\{([\s\S]*)\}\s*\)/) + if (!objectMatch) { + throw new Error('Invalid object syntax inside array') + } + + const objectContent = objectMatch[1] + const objectProperties = this.parseObjectProperties(objectContent) + + // Validate each property in the nested object + for (const propValue of Object.values(objectProperties)) { + this.validateTypeInfo(propValue) + } + + return { + isArrayOfObjects: true, + objectSchema: objectProperties + } + } + + // Handle simple arrays (e.g., z.array(z.string())) + const innerType = this.parseZodType(arrayContent) + + return { + isSimpleArray: true, + innerType: innerType + } + } + + private static validateTypeInfo(typeInfo: any): void { + // If it's a nested object or array of objects, validate each property + if (typeInfo.isNestedObject || typeInfo.isArrayOfObjects) { + for (const propValue of Object.values(typeInfo.objectSchema)) { + this.validateTypeInfo(propValue) + } + return + } + + // If it's a simple array, validate the inner type + if (typeInfo.isSimpleArray) { + this.validateTypeInfo(typeInfo.innerType) + return + } + + // Validate base type + if (!this.ALLOWED_TYPES.includes(typeInfo.base)) { + throw new Error(`Unsupported type: ${typeInfo.base}`) + } + + // Validate modifiers + for (const modifier of typeInfo.modifiers || []) { + if (!this.ALLOWED_TYPES.includes(modifier.name)) { + throw new Error(`Unsupported modifier: ${modifier.name}`) + } 
+ } + } + + private static parseArguments(argsStr: string): any[] { + // Remove outer parentheses + const inner = argsStr.slice(1, -1).trim() + if (!inner) return [] + + // Simple argument parsing for basic cases + if (inner.startsWith('[') && inner.endsWith(']')) { + // Array argument + const arrayContent = inner.slice(1, -1) + return [this.parseArrayContent(arrayContent)] + } else if (inner.match(/^\d+$/)) { + // Number argument + return [parseInt(inner, 10)] + } else if (inner.startsWith('"') && inner.endsWith('"')) { + // String argument + return [inner.slice(1, -1)] + } else { + // Try to parse as comma-separated values + return inner.split(',').map((arg) => { + arg = arg.trim() + if (arg.match(/^\d+$/)) return parseInt(arg, 10) + if (arg.startsWith('"') && arg.endsWith('"')) return arg.slice(1, -1) + return arg + }) + } + } + + private static parseArrayContent(content: string): string[] { + const items: string[] = [] + let current = '' + let inString = false + let stringChar = '' + + for (let i = 0; i < content.length; i++) { + const char = content[i] + + if (!inString && (char === '"' || char === "'")) { + inString = true + stringChar = char + current += char + } else if (inString && char === stringChar && content[i - 1] !== '\\') { + inString = false + current += char + } else if (!inString && char === ',') { + items.push(current.trim().replace(/^["']|["']$/g, '')) + current = '' + } else { + current += char + } + } + + if (current.trim()) { + items.push(current.trim().replace(/^["']|["']$/g, '')) + } + + return items + } + + private static extractArrayWithModifiers(typeStr: string): { arrayPart: string; modifiers: any[]; hasModifiers: boolean } { + // Find the matching closing parenthesis for z.array( + let depth = 0 + let arrayEndIndex = -1 + let startIndex = typeStr.indexOf('z.array(') + 7 // Position after "z.array" + + for (let i = startIndex; i < typeStr.length; i++) { + if (typeStr[i] === '(') depth++ + else if (typeStr[i] === ')') { + depth-- + if 
(depth === 0) { + arrayEndIndex = i + 1 + break + } + } + } + + if (arrayEndIndex === -1) { + return { arrayPart: typeStr, modifiers: [], hasModifiers: false } + } + + const arrayPart = typeStr.substring(0, arrayEndIndex) + const remainingPart = typeStr.substring(arrayEndIndex) + + if (!remainingPart.startsWith('.')) { + return { arrayPart: typeStr, modifiers: [], hasModifiers: false } + } + + // Parse modifiers + const modifiers: any[] = [] + const modifierParts = remainingPart.substring(1).split('.') + + for (const part of modifierParts) { + const modMatch = part.match(/^(\w+)(\(.*\))?$/) + if (!modMatch) { + throw new Error(`Invalid modifier: ${part}`) + } + + const modName = modMatch[1] + const modArgs = modMatch[2] ? this.parseArguments(modMatch[2]) : [] + + if (!this.ALLOWED_TYPES.includes(modName)) { + throw new Error(`Unsupported modifier: ${modName}`) + } + + modifiers.push({ name: modName, args: modArgs }) + } + + return { arrayPart, modifiers, hasModifiers: true } + } + + private static extractObjectWithModifiers(typeStr: string): { objectPart: string; modifiers: any[]; hasModifiers: boolean } { + // Find the matching closing brace and parenthesis for z.object({...}) + let braceDepth = 0 + let parenDepth = 0 + let objectEndIndex = -1 + let startIndex = typeStr.indexOf('z.object(') + 8 // Position after "z.object" + let foundOpenBrace = false + + for (let i = startIndex; i < typeStr.length; i++) { + if (typeStr[i] === '{') { + braceDepth++ + foundOpenBrace = true + } else if (typeStr[i] === '}') { + braceDepth-- + } else if (typeStr[i] === '(' && foundOpenBrace) { + parenDepth++ + } else if (typeStr[i] === ')' && foundOpenBrace) { + if (braceDepth === 0 && parenDepth === 0) { + objectEndIndex = i + 1 + break + } + parenDepth-- + } + } + + if (objectEndIndex === -1) { + return { objectPart: typeStr, modifiers: [], hasModifiers: false } + } + + const objectPart = typeStr.substring(0, objectEndIndex) + const remainingPart = typeStr.substring(objectEndIndex) 
+ + if (!remainingPart.startsWith('.')) { + return { objectPart: typeStr, modifiers: [], hasModifiers: false } + } + + // Parse modifiers (need special handling for .default() with object argument) + const modifiers: any[] = [] + let i = 1 // Skip the initial dot + + while (i < remainingPart.length) { + // Find modifier name + const modNameMatch = remainingPart.substring(i).match(/^(\w+)/) + if (!modNameMatch) break + + const modName = modNameMatch[1] + i += modName.length + + // Check for arguments + let modArgs: any[] = [] + if (i < remainingPart.length && remainingPart[i] === '(') { + // Find matching closing paren, handling nested structures + let depth = 0 + let argStart = i + for (let j = i; j < remainingPart.length; j++) { + if (remainingPart[j] === '(') depth++ + else if (remainingPart[j] === ')') { + depth-- + if (depth === 0) { + const argsStr = remainingPart.substring(argStart, j + 1) + modArgs = this.parseComplexArguments(argsStr) + i = j + 1 + break + } + } + } + } + + if (!this.ALLOWED_TYPES.includes(modName)) { + throw new Error(`Unsupported modifier: ${modName}`) + } + + modifiers.push({ name: modName, args: modArgs }) + + // Skip dot if present + if (i < remainingPart.length && remainingPart[i] === '.') { + i++ + } + } + + return { objectPart, modifiers, hasModifiers: modifiers.length > 0 } + } + + private static parseComplexArguments(argsStr: string): any[] { + // Remove outer parentheses + const inner = argsStr.slice(1, -1).trim() + if (!inner) return [] + + // Check if it's an object literal + if (inner.startsWith('{') && inner.endsWith('}')) { + // Parse object literal for .default() + return [this.parseObjectLiteral(inner)] + } + + // Use existing parseArguments for simple cases + return this.parseArguments(argsStr) + } + + private static parseObjectLiteral(objStr: string): any { + // Simple object literal parser for default values + const obj: any = {} + const content = objStr.slice(1, -1).trim() // Remove { } + + if (!content) return obj + + 
// Split by comma at depth 0 + const props = this.splitProperties(content) + + for (const prop of props) { + const colonIndex = prop.indexOf(':') + if (colonIndex === -1) continue + + const key = prop.substring(0, colonIndex).trim().replace(/['"]/g, '') + const valueStr = prop.substring(colonIndex + 1).trim() + + // Parse the value + if (valueStr.startsWith('[') && valueStr.endsWith(']')) { + // Array value + const arrayContent = valueStr.slice(1, -1) + obj[key] = this.parseArrayContent(arrayContent) + } else if (valueStr.startsWith('"') && valueStr.endsWith('"')) { + // String value + obj[key] = valueStr.slice(1, -1) + } else if (valueStr.match(/^\d+$/)) { + // Number value + obj[key] = parseInt(valueStr, 10) + } else { + obj[key] = valueStr + } + } + + return obj + } + + private static buildZodSchema(parsed: Record): z.ZodObject { + const schemaObj: Record = {} + + for (const [key, typeInfo] of Object.entries(parsed)) { + schemaObj[key] = this.buildZodType(typeInfo) + } + + return z.object(schemaObj) + } + + private static buildZodType(typeInfo: any): z.ZodTypeAny { + // Special case for nested objects + if (typeInfo.isNestedObject) { + let zodType: z.ZodTypeAny = this.buildZodSchema(typeInfo.objectSchema) + + // Apply modifiers if present + if (typeInfo.modifiers) { + zodType = this.applyModifiers(zodType, typeInfo.modifiers) + } + + return zodType + } + + // Special case for array of objects + if (typeInfo.isArrayOfObjects) { + const objectSchema = this.buildZodSchema(typeInfo.objectSchema) + let zodType: z.ZodTypeAny = z.array(objectSchema) + + // Apply modifiers if present + if (typeInfo.modifiers) { + zodType = this.applyModifiers(zodType, typeInfo.modifiers) + } + + return zodType + } + + // Special case for simple arrays + if (typeInfo.isSimpleArray) { + const innerZodType = this.buildZodType(typeInfo.innerType) + let zodType: z.ZodTypeAny = z.array(innerZodType) + + // Apply modifiers if present + if (typeInfo.modifiers) { + zodType = 
this.applyModifiers(zodType, typeInfo.modifiers) + } + + return zodType + } + + let zodType: z.ZodTypeAny + + // Build base type + switch (typeInfo.base) { + case 'string': + zodType = z.string() + break + case 'number': + zodType = z.number() + break + case 'boolean': + zodType = z.boolean() + break + case 'date': + zodType = z.date() + break + case 'enum': + if (typeInfo.baseArgs && typeInfo.baseArgs[0] && Array.isArray(typeInfo.baseArgs[0])) { + const enumValues = typeInfo.baseArgs[0] as [string, ...string[]] + zodType = z.enum(enumValues) + } else { + throw new Error('enum requires array of values') + } + break + default: + throw new Error(`Unsupported base type: ${typeInfo.base}`) + } + + // Apply modifiers + zodType = this.applyModifiers(zodType, typeInfo.modifiers || []) + + return zodType + } + + private static applyModifiers(zodType: z.ZodTypeAny, modifiers: any[]): z.ZodTypeAny { + for (const modifier of modifiers) { + switch (modifier.name) { + case 'int': + if (zodType._def?.typeName === 'ZodNumber') { + zodType = (zodType as z.ZodNumber).int() + } + break + case 'max': + if (modifier.args[0] !== undefined) { + if (zodType._def?.typeName === 'ZodString') { + zodType = (zodType as z.ZodString).max(modifier.args[0]) + } else if (zodType._def?.typeName === 'ZodArray') { + zodType = (zodType as z.ZodArray).max(modifier.args[0]) + } + } + break + case 'min': + if (modifier.args[0] !== undefined) { + if (zodType._def?.typeName === 'ZodString') { + zodType = (zodType as z.ZodString).min(modifier.args[0]) + } else if (zodType._def?.typeName === 'ZodArray') { + zodType = (zodType as z.ZodArray).min(modifier.args[0]) + } + } + break + case 'optional': + zodType = zodType.optional() + break + case 'array': + zodType = z.array(zodType) + break + case 'describe': + if (modifier.args[0]) { + zodType = zodType.describe(modifier.args[0]) + } + break + case 'default': + if (modifier.args[0] !== undefined) { + zodType = zodType.default(modifier.args[0]) + } + break + 
default: + // Ignore unknown modifiers for compatibility + break + } + } + return zodType + } +} diff --git a/packages/components/src/speechToText.ts b/packages/components/src/speechToText.ts index fbb659d54..29097655a 100644 --- a/packages/components/src/speechToText.ts +++ b/packages/components/src/speechToText.ts @@ -18,7 +18,7 @@ export const convertSpeechToText = async (upload: IFileUpload, speechToTextConfi if (speechToTextConfig) { const credentialId = speechToTextConfig.credentialId as string const credentialData = await getCredentialData(credentialId ?? '', options) - const audio_file = await getFileFromStorage(upload.name, options.chatflowid, options.chatId) + const audio_file = await getFileFromStorage(upload.name, options.orgId, options.chatflowid, options.chatId) switch (speechToTextConfig.name) { case SpeechToTextType.OPENAI_WHISPER: { diff --git a/packages/components/src/storageUtils.ts b/packages/components/src/storageUtils.ts index a918c4f00..ff48bb056 100644 --- a/packages/components/src/storageUtils.ts +++ b/packages/components/src/storageUtils.ts @@ -1,19 +1,56 @@ -import path from 'path' -import fs from 'fs' import { DeleteObjectsCommand, GetObjectCommand, + ListObjectsCommand, ListObjectsV2Command, PutObjectCommand, S3Client, S3ClientConfig } from '@aws-sdk/client-s3' import { Storage } from '@google-cloud/storage' +import fs from 'fs' import { Readable } from 'node:stream' -import { getUserHome } from './utils' +import path from 'path' import sanitize from 'sanitize-filename' +import { getUserHome } from './utils' +import { isPathTraversal, isValidUUID } from './validator' + +const dirSize = async (directoryPath: string) => { + let totalSize = 0 + + async function calculateSize(itemPath: string) { + const stats = await fs.promises.stat(itemPath) + + if (stats.isFile()) { + totalSize += stats.size + } else if (stats.isDirectory()) { + const files = await fs.promises.readdir(itemPath) + for (const file of files) { + await 
calculateSize(path.join(itemPath, file)) + } + } + } + + await calculateSize(directoryPath) + return totalSize +} + +export const addBase64FilesToStorage = async ( + fileBase64: string, + chatflowid: string, + fileNames: string[], + orgId: string +): Promise<{ path: string; totalSize: number }> => { + // Validate chatflowid + if (!chatflowid || !isValidUUID(chatflowid)) { + throw new Error('Invalid chatflowId format - must be a valid UUID') + } + + // Check for path traversal attempts + if (isPathTraversal(chatflowid)) { + throw new Error('Invalid path characters detected in chatflowId') + } -export const addBase64FilesToStorage = async (fileBase64: string, chatflowid: string, fileNames: string[]) => { const storageType = getStorageType() if (storageType === 's3') { const { s3Client, Bucket } = getS3Config() @@ -24,8 +61,8 @@ export const addBase64FilesToStorage = async (fileBase64: string, chatflowid: st const mime = splitDataURI[0].split(':')[1].split(';')[0] const sanitizedFilename = _sanitizeFilename(filename) + const Key = orgId + '/' + chatflowid + '/' + sanitizedFilename - const Key = chatflowid + '/' + sanitizedFilename const putObjCmd = new PutObjectCommand({ Bucket, Key, @@ -36,7 +73,9 @@ export const addBase64FilesToStorage = async (fileBase64: string, chatflowid: st await s3Client.send(putObjCmd) fileNames.push(sanitizedFilename) - return 'FILE-STORAGE::' + JSON.stringify(fileNames) + const totalSize = await getS3StorageSize(orgId) + + return { path: 'FILE-STORAGE::' + JSON.stringify(fileNames), totalSize: totalSize / 1024 / 1024 } } else if (storageType === 'gcs') { const { bucket } = getGcsClient() const splitDataURI = fileBase64.split(',') @@ -55,9 +94,11 @@ export const addBase64FilesToStorage = async (fileBase64: string, chatflowid: st .end(bf) }) fileNames.push(sanitizedFilename) - return 'FILE-STORAGE::' + JSON.stringify(fileNames) + const totalSize = await getGCSStorageSize(orgId) + + return { path: 'FILE-STORAGE::' + JSON.stringify(fileNames), 
totalSize: totalSize / 1024 / 1024 } } else { - const dir = path.join(getStoragePath(), chatflowid) + const dir = path.join(getStoragePath(), orgId, chatflowid) if (!fs.existsSync(dir)) { fs.mkdirSync(dir, { recursive: true }) } @@ -68,13 +109,22 @@ export const addBase64FilesToStorage = async (fileBase64: string, chatflowid: st const sanitizedFilename = _sanitizeFilename(filename) const filePath = path.join(dir, sanitizedFilename) + fs.writeFileSync(filePath, bf) fileNames.push(sanitizedFilename) - return 'FILE-STORAGE::' + JSON.stringify(fileNames) + + const totalSize = await dirSize(path.join(getStoragePath(), orgId)) + return { path: 'FILE-STORAGE::' + JSON.stringify(fileNames), totalSize: totalSize / 1024 / 1024 } } } -export const addArrayFilesToStorage = async (mime: string, bf: Buffer, fileName: string, fileNames: string[], ...paths: string[]) => { +export const addArrayFilesToStorage = async ( + mime: string, + bf: Buffer, + fileName: string, + fileNames: string[], + ...paths: string[] +): Promise<{ path: string; totalSize: number }> => { const storageType = getStorageType() const sanitizedFilename = _sanitizeFilename(fileName) @@ -95,7 +145,10 @@ export const addArrayFilesToStorage = async (mime: string, bf: Buffer, fileName: }) await s3Client.send(putObjCmd) fileNames.push(sanitizedFilename) - return 'FILE-STORAGE::' + JSON.stringify(fileNames) + + const totalSize = await getS3StorageSize(paths[0]) + + return { path: 'FILE-STORAGE::' + JSON.stringify(fileNames), totalSize: totalSize / 1024 / 1024 } } else if (storageType === 'gcs') { const { bucket } = getGcsClient() const normalizedPaths = paths.map((p) => p.replace(/\\/g, '/')) @@ -109,7 +162,10 @@ export const addArrayFilesToStorage = async (mime: string, bf: Buffer, fileName: .end(bf) }) fileNames.push(sanitizedFilename) - return 'FILE-STORAGE::' + JSON.stringify(fileNames) + + const totalSize = await getGCSStorageSize(paths[0]) + + return { path: 'FILE-STORAGE::' + JSON.stringify(fileNames), 
totalSize: totalSize / 1024 / 1024 } } else { const dir = path.join(getStoragePath(), ...paths.map(_sanitizeFilename)) if (!fs.existsSync(dir)) { @@ -118,11 +174,19 @@ export const addArrayFilesToStorage = async (mime: string, bf: Buffer, fileName: const filePath = path.join(dir, sanitizedFilename) fs.writeFileSync(filePath, bf) fileNames.push(sanitizedFilename) - return 'FILE-STORAGE::' + JSON.stringify(fileNames) + + const totalSize = await dirSize(path.join(getStoragePath(), paths[0])) + + return { path: 'FILE-STORAGE::' + JSON.stringify(fileNames), totalSize: totalSize / 1024 / 1024 } } } -export const addSingleFileToStorage = async (mime: string, bf: Buffer, fileName: string, ...paths: string[]) => { +export const addSingleFileToStorage = async ( + mime: string, + bf: Buffer, + fileName: string, + ...paths: string[] +): Promise<{ path: string; totalSize: number }> => { const storageType = getStorageType() const sanitizedFilename = _sanitizeFilename(fileName) @@ -142,7 +206,10 @@ export const addSingleFileToStorage = async (mime: string, bf: Buffer, fileName: Body: bf }) await s3Client.send(putObjCmd) - return 'FILE-STORAGE::' + sanitizedFilename + + const totalSize = await getS3StorageSize(paths[0]) + + return { path: 'FILE-STORAGE::' + sanitizedFilename, totalSize: totalSize / 1024 / 1024 } } else if (storageType === 'gcs') { const { bucket } = getGcsClient() const normalizedPaths = paths.map((p) => p.replace(/\\/g, '/')) @@ -155,7 +222,10 @@ export const addSingleFileToStorage = async (mime: string, bf: Buffer, fileName: .on('finish', () => resolve()) .end(bf) }) - return 'FILE-STORAGE::' + sanitizedFilename + + const totalSize = await getGCSStorageSize(paths[0]) + + return { path: 'FILE-STORAGE::' + sanitizedFilename, totalSize: totalSize / 1024 / 1024 } } else { const dir = path.join(getStoragePath(), ...paths.map(_sanitizeFilename)) if (!fs.existsSync(dir)) { @@ -163,7 +233,9 @@ export const addSingleFileToStorage = async (mime: string, bf: Buffer, 
fileName: } const filePath = path.join(dir, sanitizedFilename) fs.writeFileSync(filePath, bf) - return 'FILE-STORAGE::' + sanitizedFilename + + const totalSize = await dirSize(path.join(getStoragePath(), paths[0])) + return { path: 'FILE-STORAGE::' + sanitizedFilename, totalSize: totalSize / 1024 / 1024 } } } @@ -215,41 +287,257 @@ export const getFileFromStorage = async (file: string, ...paths: string[]): Prom Key = Key.substring(1) } - const getParams = { - Bucket, - Key - } + try { + const getParams = { + Bucket, + Key + } - const response = await s3Client.send(new GetObjectCommand(getParams)) - const body = response.Body - if (body instanceof Readable) { - const streamToString = await body.transformToString('base64') - if (streamToString) { - return Buffer.from(streamToString, 'base64') + const response = await s3Client.send(new GetObjectCommand(getParams)) + const body = response.Body + if (body instanceof Readable) { + const streamToString = await body.transformToString('base64') + if (streamToString) { + return Buffer.from(streamToString, 'base64') + } + } + // @ts-ignore + const buffer = Buffer.concat(response.Body.toArray()) + return buffer + } catch (error) { + // Fallback: Check if file exists without the first path element (likely orgId) + if (paths.length > 1) { + const fallbackPaths = paths.slice(1) + let fallbackKey = fallbackPaths.reduce((acc, cur) => acc + '/' + cur, '') + '/' + sanitizedFilename + if (fallbackKey.startsWith('/')) { + fallbackKey = fallbackKey.substring(1) + } + + try { + const fallbackParams = { + Bucket, + Key: fallbackKey + } + const fallbackResponse = await s3Client.send(new GetObjectCommand(fallbackParams)) + const fallbackBody = fallbackResponse.Body + + // Get the file content + let fileContent: Buffer + if (fallbackBody instanceof Readable) { + const streamToString = await fallbackBody.transformToString('base64') + if (streamToString) { + fileContent = Buffer.from(streamToString, 'base64') + } else { + // @ts-ignore + 
fileContent = Buffer.concat(fallbackBody.toArray()) + } + } else { + // @ts-ignore + fileContent = Buffer.concat(fallbackBody.toArray()) + } + + // Move to correct location with orgId + const putObjCmd = new PutObjectCommand({ + Bucket, + Key, + Body: fileContent + }) + await s3Client.send(putObjCmd) + + // Delete the old file + await s3Client.send( + new DeleteObjectsCommand({ + Bucket, + Delete: { + Objects: [{ Key: fallbackKey }], + Quiet: false + } + }) + ) + + // Check if the directory is empty and delete recursively if needed + if (fallbackPaths.length > 0) { + await _cleanEmptyS3Folders(s3Client, Bucket, fallbackPaths[0]) + } + + return fileContent + } catch (fallbackError) { + // Throw the original error since the fallback also failed + throw error + } + } else { + throw error } } - // @ts-ignore - const buffer = Buffer.concat(response.Body.toArray()) - return buffer } else if (storageType === 'gcs') { const { bucket } = getGcsClient() const normalizedPaths = paths.map((p) => p.replace(/\\/g, '/')) const normalizedFilename = sanitizedFilename.replace(/\\/g, '/') const filePath = [...normalizedPaths, normalizedFilename].join('/') - const file = bucket.file(filePath) - const [buffer] = await file.download() - return buffer + + try { + const file = bucket.file(filePath) + const [buffer] = await file.download() + return buffer + } catch (error) { + // Fallback: Check if file exists without the first path element (likely orgId) + if (normalizedPaths.length > 1) { + const fallbackPaths = normalizedPaths.slice(1) + const fallbackPath = [...fallbackPaths, normalizedFilename].join('/') + + try { + const fallbackFile = bucket.file(fallbackPath) + const [buffer] = await fallbackFile.download() + + // Move to correct location with orgId + const file = bucket.file(filePath) + await new Promise((resolve, reject) => { + file.createWriteStream() + .on('error', (err) => reject(err)) + .on('finish', () => resolve()) + .end(buffer) + }) + + // Delete the old file + await 
fallbackFile.delete() + + // Check if the directory is empty and delete recursively if needed + if (fallbackPaths.length > 0) { + await _cleanEmptyGCSFolders(bucket, fallbackPaths[0]) + } + + return buffer + } catch (fallbackError) { + // Throw the original error since the fallback also failed + throw error + } + } else { + throw error + } + } } else { - const fileInStorage = path.join(getStoragePath(), ...paths.map(_sanitizeFilename), sanitizedFilename) - return fs.readFileSync(fileInStorage) + try { + const fileInStorage = path.join(getStoragePath(), ...paths.map(_sanitizeFilename), sanitizedFilename) + return fs.readFileSync(fileInStorage) + } catch (error) { + // Fallback: Check if file exists without the first path element (likely orgId) + if (paths.length > 1) { + const fallbackPaths = paths.slice(1) + const fallbackPath = path.join(getStoragePath(), ...fallbackPaths.map(_sanitizeFilename), sanitizedFilename) + + if (fs.existsSync(fallbackPath)) { + // Create directory if it doesn't exist + const targetPath = path.join(getStoragePath(), ...paths.map(_sanitizeFilename), sanitizedFilename) + const dir = path.dirname(targetPath) + if (!fs.existsSync(dir)) { + fs.mkdirSync(dir, { recursive: true }) + } + + // Copy file to correct location with orgId + fs.copyFileSync(fallbackPath, targetPath) + + // Delete the old file + fs.unlinkSync(fallbackPath) + + // Clean up empty directories recursively + if (fallbackPaths.length > 0) { + _cleanEmptyLocalFolders(path.join(getStoragePath(), ...fallbackPaths.map(_sanitizeFilename).slice(0, -1))) + } + + return fs.readFileSync(targetPath) + } else { + throw error + } + } else { + throw error + } + } } } +export const getFilesListFromStorage = async (...paths: string[]): Promise> => { + const storageType = getStorageType() + if (storageType === 's3') { + const { s3Client, Bucket } = getS3Config() + + let Key = paths.reduce((acc, cur) => acc + '/' + cur, '') + if (Key.startsWith('/')) { + Key = Key.substring(1) + } + + const 
listCommand = new ListObjectsV2Command({ + Bucket, + Prefix: Key + }) + const list = await s3Client.send(listCommand) + + if (list.Contents && list.Contents.length > 0) { + return list.Contents.map((item) => ({ + name: item.Key?.split('/').pop() || '', + path: item.Key ?? '', + size: item.Size || 0 + })) + } else { + return [] + } + } else { + const directory = path.join(getStoragePath(), ...paths) + const filesList = getFilePaths(directory) + return filesList + } +} + +interface FileInfo { + name: string + path: string + size: number +} + +function getFilePaths(dir: string): FileInfo[] { + let results: FileInfo[] = [] + + function readDirectory(directory: string) { + try { + if (!fs.existsSync(directory)) { + console.warn(`Directory does not exist: ${directory}`) + return + } + + const list = fs.readdirSync(directory) + list.forEach((file) => { + const filePath = path.join(directory, file) + try { + const stat = fs.statSync(filePath) + if (stat && stat.isDirectory()) { + readDirectory(filePath) + } else { + const sizeInMB = stat.size / (1024 * 1024) + results.push({ name: file, path: filePath, size: sizeInMB }) + } + } catch (error) { + console.error(`Error processing file ${filePath}:`, error) + } + }) + } catch (error) { + console.error(`Error reading directory ${directory}:`, error) + } + } + + readDirectory(dir) + return results +} + /** * Prepare storage path */ export const getStoragePath = (): string => { - return process.env.BLOB_STORAGE_PATH ? path.join(process.env.BLOB_STORAGE_PATH) : path.join(getUserHome(), '.flowise', 'storage') + const storagePath = process.env.BLOB_STORAGE_PATH + ? 
path.join(process.env.BLOB_STORAGE_PATH) + : path.join(getUserHome(), '.flowise', 'storage') + if (!fs.existsSync(storagePath)) { + fs.mkdirSync(storagePath, { recursive: true }) + } + return storagePath } /** @@ -267,14 +555,26 @@ export const removeFilesFromStorage = async (...paths: string[]) => { if (Key.startsWith('/')) { Key = Key.substring(1) } + await _deleteS3Folder(Key) + + // check folder size after deleting all the files + const totalSize = await getS3StorageSize(paths[0]) + return { totalSize: totalSize / 1024 / 1024 } } else if (storageType === 'gcs') { const { bucket } = getGcsClient() const normalizedPath = paths.map((p) => p.replace(/\\/g, '/')).join('/') await bucket.deleteFiles({ prefix: `${normalizedPath}/` }) + + const totalSize = await getGCSStorageSize(paths[0]) + return { totalSize: totalSize / 1024 / 1024 } } else { const directory = path.join(getStoragePath(), ...paths.map(_sanitizeFilename)) - _deleteLocalFolderRecursive(directory) + await _deleteLocalFolderRecursive(directory) + + const totalSize = await dirSize(path.join(getStoragePath(), paths[0])) + + return { totalSize: totalSize / 1024 / 1024 } } } @@ -304,6 +604,10 @@ export const removeSpecificFileFromStorage = async (...paths: string[]) => { Key = Key.substring(1) } await _deleteS3Folder(Key) + + // check folder size after deleting all the files + const totalSize = await getS3StorageSize(paths[0]) + return { totalSize: totalSize / 1024 / 1024 } } else if (storageType === 'gcs') { const { bucket } = getGcsClient() const fileName = paths.pop() @@ -313,6 +617,9 @@ export const removeSpecificFileFromStorage = async (...paths: string[]) => { } const normalizedPath = paths.map((p) => p.replace(/\\/g, '/')).join('/') await bucket.file(normalizedPath).delete() + + const totalSize = await getGCSStorageSize(paths[0]) + return { totalSize: totalSize / 1024 / 1024 } } else { const fileName = paths.pop() if (fileName) { @@ -320,7 +627,15 @@ export const removeSpecificFileFromStorage = async 
(...paths: string[]) => { paths.push(sanitizedFilename) } const file = path.join(getStoragePath(), ...paths.map(_sanitizeFilename)) - fs.unlinkSync(file) + // check if file exists, if not skip delete + // this might happen when user tries to delete a document loader but the attached file is already deleted + const stat = fs.statSync(file, { throwIfNoEntry: false }) + if (stat && stat.isFile()) { + fs.unlinkSync(file) + } + + const totalSize = await dirSize(path.join(getStoragePath(), paths[0])) + return { totalSize: totalSize / 1024 / 1024 } } } @@ -333,52 +648,63 @@ export const removeFolderFromStorage = async (...paths: string[]) => { Key = Key.substring(1) } await _deleteS3Folder(Key) + + // check folder size after deleting all the files + const totalSize = await getS3StorageSize(paths[0]) + return { totalSize: totalSize / 1024 / 1024 } } else if (storageType === 'gcs') { const { bucket } = getGcsClient() const normalizedPath = paths.map((p) => p.replace(/\\/g, '/')).join('/') await bucket.deleteFiles({ prefix: `${normalizedPath}/` }) + + const totalSize = await getGCSStorageSize(paths[0]) + return { totalSize: totalSize / 1024 / 1024 } } else { const directory = path.join(getStoragePath(), ...paths.map(_sanitizeFilename)) - _deleteLocalFolderRecursive(directory, true) + await _deleteLocalFolderRecursive(directory, true) + + const totalSize = await dirSize(path.join(getStoragePath(), paths[0])) + return { totalSize: totalSize / 1024 / 1024 } } } -const _deleteLocalFolderRecursive = (directory: string, deleteParentChatflowFolder?: boolean) => { - // Console error here as failing is not destructive operation - if (fs.existsSync(directory)) { +const _deleteLocalFolderRecursive = async (directory: string, deleteParentChatflowFolder?: boolean) => { + try { + // Check if the path exists + await fs.promises.access(directory) + if (deleteParentChatflowFolder) { - fs.rmSync(directory, { recursive: true, force: true }) - } else { - fs.readdir(directory, (error, files) => 
{ - if (error) console.error('Could not read directory') - - for (let i = 0; i < files.length; i++) { - const file = files[i] - const file_path = path.join(directory, file) - - fs.stat(file_path, (error, stat) => { - if (error) console.error('File do not exist') - - if (!stat.isDirectory()) { - fs.unlink(file_path, (error) => { - if (error) console.error('Could not delete file') - }) - if (i === files.length - 1) { - fs.rmSync(directory, { recursive: true, force: true }) - } - } else { - _deleteLocalFolderRecursive(file_path) - } - }) - } - }) + await fs.promises.rmdir(directory, { recursive: true }) } + + // Get stats of the path to determine if it's a file or directory + const stats = await fs.promises.stat(directory) + + if (stats.isDirectory()) { + // Read all directory contents + const files = await fs.promises.readdir(directory) + + // Recursively delete all contents + for (const file of files) { + const currentPath = path.join(directory, file) + await _deleteLocalFolderRecursive(currentPath) // Recursive call + } + + // Delete the directory itself after emptying it + await fs.promises.rmdir(directory, { recursive: true }) + } else { + // If it's a file, delete it directly + await fs.promises.unlink(directory) + } + } catch (error) { + // Error handling } } const _deleteS3Folder = async (location: string) => { let count = 0 // number of files deleted const { s3Client, Bucket } = getS3Config() + async function recursiveS3Delete(token?: any) { // get the files const listCommand = new ListObjectsV2Command({ @@ -410,6 +736,7 @@ const _deleteS3Folder = async (location: string) => { // return total deleted count when finished return `${count} files deleted from S3` } + // start the recursive function return recursiveS3Delete() } @@ -417,34 +744,130 @@ const _deleteS3Folder = async (location: string) => { export const streamStorageFile = async ( chatflowId: string, chatId: string, - fileName: string + fileName: string, + orgId: string ): Promise => { + // Validate 
chatflowId + if (!chatflowId || !isValidUUID(chatflowId)) { + throw new Error('Invalid chatflowId format - must be a valid UUID') + } + + // Check for path traversal attempts + if (isPathTraversal(chatflowId) || isPathTraversal(chatId)) { + throw new Error('Invalid path characters detected in chatflowId or chatId') + } + const storageType = getStorageType() const sanitizedFilename = sanitize(fileName) if (storageType === 's3') { const { s3Client, Bucket } = getS3Config() - const Key = chatflowId + '/' + chatId + '/' + sanitizedFilename + const Key = orgId + '/' + chatflowId + '/' + chatId + '/' + sanitizedFilename const getParams = { Bucket, Key } - const response = await s3Client.send(new GetObjectCommand(getParams)) - const body = response.Body - if (body instanceof Readable) { - const blob = await body.transformToByteArray() - return Buffer.from(blob) + try { + const response = await s3Client.send(new GetObjectCommand(getParams)) + const body = response.Body + if (body instanceof Readable) { + const blob = await body.transformToByteArray() + return Buffer.from(blob) + } + } catch (error) { + // Fallback: Check if file exists without orgId + const fallbackKey = chatflowId + '/' + chatId + '/' + sanitizedFilename + try { + const fallbackParams = { + Bucket, + Key: fallbackKey + } + const fallbackResponse = await s3Client.send(new GetObjectCommand(fallbackParams)) + const fallbackBody = fallbackResponse.Body + + // If found, copy to correct location with orgId + if (fallbackBody) { + // Get the file content + let fileContent: Buffer + if (fallbackBody instanceof Readable) { + const blob = await fallbackBody.transformToByteArray() + fileContent = Buffer.from(blob) + } else { + // @ts-ignore + fileContent = Buffer.concat(fallbackBody.toArray()) + } + + // Move to correct location with orgId + const putObjCmd = new PutObjectCommand({ + Bucket, + Key, + Body: fileContent + }) + await s3Client.send(putObjCmd) + + // Delete the old file + await s3Client.send( + new 
DeleteObjectsCommand({ + Bucket, + Delete: { + Objects: [{ Key: fallbackKey }], + Quiet: false + } + }) + ) + + // Check if the directory is empty and delete recursively if needed + await _cleanEmptyS3Folders(s3Client, Bucket, chatflowId) + + return fileContent + } + } catch (fallbackError) { + // File not found in fallback location either + throw new Error(`File ${fileName} not found`) + } } } else if (storageType === 'gcs') { const { bucket } = getGcsClient() const normalizedChatflowId = chatflowId.replace(/\\/g, '/') const normalizedChatId = chatId.replace(/\\/g, '/') const normalizedFilename = sanitizedFilename.replace(/\\/g, '/') - const filePath = `${normalizedChatflowId}/${normalizedChatId}/${normalizedFilename}` - const [buffer] = await bucket.file(filePath).download() - return buffer + const filePath = `${orgId}/${normalizedChatflowId}/${normalizedChatId}/${normalizedFilename}` + + try { + const [buffer] = await bucket.file(filePath).download() + return buffer + } catch (error) { + // Fallback: Check if file exists without orgId + const fallbackPath = `${normalizedChatflowId}/${normalizedChatId}/${normalizedFilename}` + try { + const fallbackFile = bucket.file(fallbackPath) + const [buffer] = await fallbackFile.download() + + // If found, copy to correct location with orgId + if (buffer) { + const file = bucket.file(filePath) + await new Promise((resolve, reject) => { + file.createWriteStream() + .on('error', (err) => reject(err)) + .on('finish', () => resolve()) + .end(buffer) + }) + + // Delete the old file + await fallbackFile.delete() + + // Check if the directory is empty and delete recursively if needed + await _cleanEmptyGCSFolders(bucket, normalizedChatflowId) + + return buffer + } + } catch (fallbackError) { + // File not found in fallback location either + throw new Error(`File ${fileName} not found`) + } + } } else { - const filePath = path.join(getStoragePath(), chatflowId, chatId, sanitizedFilename) + const filePath = 
path.join(getStoragePath(), orgId, chatflowId, chatId, sanitizedFilename) //raise error if file path is not absolute if (!path.isAbsolute(filePath)) throw new Error(`Invalid file path`) //raise error if file path contains '..' @@ -455,25 +878,170 @@ export const streamStorageFile = async ( if (fs.existsSync(filePath)) { return fs.createReadStream(filePath) } else { - throw new Error(`File ${fileName} not found`) + // Fallback: Check if file exists without orgId + const fallbackPath = path.join(getStoragePath(), chatflowId, chatId, sanitizedFilename) + + if (fs.existsSync(fallbackPath)) { + // Create directory if it doesn't exist + const dir = path.dirname(filePath) + if (!fs.existsSync(dir)) { + fs.mkdirSync(dir, { recursive: true }) + } + + // Copy file to correct location with orgId + fs.copyFileSync(fallbackPath, filePath) + + // Delete the old file + fs.unlinkSync(fallbackPath) + + // Clean up empty directories recursively + _cleanEmptyLocalFolders(path.join(getStoragePath(), chatflowId, chatId)) + + return fs.createReadStream(filePath) + } else { + throw new Error(`File ${fileName} not found`) + } } } } +/** + * Check if a local directory is empty and delete it if so, + * then check parent directories recursively + */ +const _cleanEmptyLocalFolders = (dirPath: string) => { + try { + // Stop if we reach the storage root + if (dirPath === getStoragePath()) return + + // Check if directory exists + if (!fs.existsSync(dirPath)) return + + // Read directory contents + const files = fs.readdirSync(dirPath) + + // If directory is empty, delete it and check parent + if (files.length === 0) { + fs.rmdirSync(dirPath) + // Recursively check parent directory + _cleanEmptyLocalFolders(path.dirname(dirPath)) + } + } catch (error) { + // Ignore errors during cleanup + console.error('Error cleaning empty folders:', error) + } +} + +/** + * Check if an S3 "folder" is empty and delete it recursively + */ +const _cleanEmptyS3Folders = async (s3Client: S3Client, Bucket: string, 
prefix: string) => { + try { + // Skip if prefix is empty + if (!prefix) return + + // List objects in this "folder" + const listCmd = new ListObjectsV2Command({ + Bucket, + Prefix: prefix + '/', + Delimiter: '/' + }) + + const response = await s3Client.send(listCmd) + + // If folder is empty (only contains common prefixes but no files) + if ( + (response.Contents?.length === 0 || !response.Contents) && + (response.CommonPrefixes?.length === 0 || !response.CommonPrefixes) + ) { + // Delete the folder marker if it exists + await s3Client.send( + new DeleteObjectsCommand({ + Bucket, + Delete: { + Objects: [{ Key: prefix + '/' }], + Quiet: true + } + }) + ) + + // Recursively check parent folder + const parentPrefix = prefix.substring(0, prefix.lastIndexOf('/')) + if (parentPrefix) { + await _cleanEmptyS3Folders(s3Client, Bucket, parentPrefix) + } + } + } catch (error) { + // Ignore errors during cleanup + console.error('Error cleaning empty S3 folders:', error) + } +} + +/** + * Check if a GCS "folder" is empty and delete recursively if so + */ +const _cleanEmptyGCSFolders = async (bucket: any, prefix: string) => { + try { + // Skip if prefix is empty + if (!prefix) return + + // List files with this prefix + const [files] = await bucket.getFiles({ + prefix: prefix + '/', + delimiter: '/' + }) + + // If folder is empty (no files) + if (files.length === 0) { + // Delete the folder marker if it exists + try { + await bucket.file(prefix + '/').delete() + } catch (err) { + // Folder marker might not exist, ignore + } + + // Recursively check parent folder + const parentPrefix = prefix.substring(0, prefix.lastIndexOf('/')) + if (parentPrefix) { + await _cleanEmptyGCSFolders(bucket, parentPrefix) + } + } + } catch (error) { + // Ignore errors during cleanup + console.error('Error cleaning empty GCS folders:', error) + } +} + +export const getGCSStorageSize = async (orgId: string): Promise => { + const { bucket } = getGcsClient() + let totalSize = 0 + + const [files] = 
await bucket.getFiles({ prefix: orgId }) + + for (const file of files) { + const size = file.metadata.size + // Handle different types that size could be + if (typeof size === 'string') { + totalSize += parseInt(size, 10) || 0 + } else if (typeof size === 'number') { + totalSize += size + } + } + + return totalSize +} + export const getGcsClient = () => { const pathToGcsCredential = process.env.GOOGLE_CLOUD_STORAGE_CREDENTIAL const projectId = process.env.GOOGLE_CLOUD_STORAGE_PROJ_ID const bucketName = process.env.GOOGLE_CLOUD_STORAGE_BUCKET_NAME - if (!pathToGcsCredential) { - throw new Error('GOOGLE_CLOUD_STORAGE_CREDENTIAL env variable is required') - } if (!bucketName) { throw new Error('GOOGLE_CLOUD_STORAGE_BUCKET_NAME env variable is required') } const storageConfig = { - keyFilename: pathToGcsCredential, + ...(pathToGcsCredential ? { keyFilename: pathToGcsCredential } : {}), ...(projectId ? { projectId } : {}) } @@ -482,6 +1050,20 @@ export const getGcsClient = () => { return { storage, bucket } } +export const getS3StorageSize = async (orgId: string): Promise => { + const { s3Client, Bucket } = getS3Config() + const getCmd = new ListObjectsCommand({ + Bucket, + Prefix: orgId + }) + const headObj = await s3Client.send(getCmd) + let totalSize = 0 + for (const obj of headObj.Contents || []) { + totalSize += obj.Size || 0 + } + return totalSize +} + export const getS3Config = () => { const accessKeyId = process.env.S3_STORAGE_ACCESS_KEY_ID const secretAccessKey = process.env.S3_STORAGE_SECRET_ACCESS_KEY @@ -490,17 +1072,21 @@ export const getS3Config = () => { const customURL = process.env.S3_ENDPOINT_URL const forcePathStyle = process.env.S3_FORCE_PATH_STYLE === 'true' ? 
true : false - if (!region || !Bucket) { + if (!region || region.trim() === '' || !Bucket || Bucket.trim() === '') { throw new Error('S3 storage configuration is missing') } const s3Config: S3ClientConfig = { region: region, - endpoint: customURL, forcePathStyle: forcePathStyle } - if (accessKeyId && secretAccessKey) { + // Only include endpoint if customURL is not empty + if (customURL && customURL.trim() !== '') { + s3Config.endpoint = customURL + } + + if (accessKeyId && accessKeyId.trim() !== '' && secretAccessKey && secretAccessKey.trim() !== '') { s3Config.credentials = { accessKeyId: accessKeyId, secretAccessKey: secretAccessKey diff --git a/packages/components/src/textToSpeech.ts b/packages/components/src/textToSpeech.ts new file mode 100644 index 000000000..fdc17af65 --- /dev/null +++ b/packages/components/src/textToSpeech.ts @@ -0,0 +1,240 @@ +import { ICommonObject } from './Interface' +import { getCredentialData } from './utils' +import OpenAI from 'openai' +import { ElevenLabsClient } from '@elevenlabs/elevenlabs-js' +import { Readable } from 'node:stream' +import type { ReadableStream } from 'node:stream/web' + +const TextToSpeechType = { + OPENAI_TTS: 'openai', + ELEVEN_LABS_TTS: 'elevenlabs' +} + +export const convertTextToSpeechStream = async ( + text: string, + textToSpeechConfig: ICommonObject, + options: ICommonObject, + abortController: AbortController, + onStart: (format: string) => void, + onChunk: (chunk: Buffer) => void, + onEnd: () => void +): Promise => { + return new Promise((resolve, reject) => { + let streamDestroyed = false + + // Handle abort signal early + if (abortController.signal.aborted) { + reject(new Error('TTS generation aborted')) + return + } + const processStream = async () => { + try { + if (textToSpeechConfig) { + const credentialId = textToSpeechConfig.credentialId as string + const credentialData = await getCredentialData(credentialId ?? 
'', options) + + switch (textToSpeechConfig.name) { + case TextToSpeechType.OPENAI_TTS: { + onStart('mp3') + + const openai = new OpenAI({ + apiKey: credentialData.openAIApiKey + }) + + const response = await openai.audio.speech.create( + { + model: 'gpt-4o-mini-tts', + voice: (textToSpeechConfig.voice || 'alloy') as + | 'alloy' + | 'ash' + | 'ballad' + | 'coral' + | 'echo' + | 'fable' + | 'nova' + | 'onyx' + | 'sage' + | 'shimmer', + input: text, + response_format: 'mp3' + }, + { + signal: abortController.signal + } + ) + + const stream = response.body as unknown as Readable + if (!stream) { + throw new Error('Failed to get response stream') + } + + await processStreamWithRateLimit(stream, onChunk, onEnd, resolve, reject, 640, 20, abortController, () => { + streamDestroyed = true + }) + break + } + + case TextToSpeechType.ELEVEN_LABS_TTS: { + onStart('mp3') + + const client = new ElevenLabsClient({ + apiKey: credentialData.elevenLabsApiKey + }) + + const response = await client.textToSpeech.stream( + textToSpeechConfig.voice || '21m00Tcm4TlvDq8ikWAM', + { + text: text, + modelId: 'eleven_multilingual_v2' + }, + { abortSignal: abortController.signal } + ) + + const stream = Readable.fromWeb(response as unknown as ReadableStream) + if (!stream) { + throw new Error('Failed to get response stream') + } + + await processStreamWithRateLimit(stream, onChunk, onEnd, resolve, reject, 640, 40, abortController, () => { + streamDestroyed = true + }) + break + } + } + } else { + reject(new Error('Text to speech is not selected. 
Please configure TTS in the chatflow.')) + } + } catch (error) { + reject(error) + } + } + + // Handle abort signal + abortController.signal.addEventListener('abort', () => { + if (!streamDestroyed) { + reject(new Error('TTS generation aborted')) + } + }) + + processStream() + }) +} + +const processStreamWithRateLimit = async ( + stream: Readable, + onChunk: (chunk: Buffer) => void, + onEnd: () => void, + resolve: () => void, + reject: (error: any) => void, + targetChunkSize: number = 640, + rateLimitMs: number = 20, + abortController: AbortController, + onStreamDestroy?: () => void +) => { + const TARGET_CHUNK_SIZE = targetChunkSize + const RATE_LIMIT_MS = rateLimitMs + + let buffer: Buffer = Buffer.alloc(0) + let isEnded = false + + const processChunks = async () => { + while (!isEnded || buffer.length > 0) { + // Check if aborted + if (abortController.signal.aborted) { + if (!stream.destroyed) { + stream.destroy() + } + onStreamDestroy?.() + reject(new Error('TTS generation aborted')) + return + } + + if (buffer.length >= TARGET_CHUNK_SIZE) { + const chunk = buffer.subarray(0, TARGET_CHUNK_SIZE) + buffer = buffer.subarray(TARGET_CHUNK_SIZE) + onChunk(chunk) + await sleep(RATE_LIMIT_MS) + } else if (isEnded && buffer.length > 0) { + onChunk(buffer) + buffer = Buffer.alloc(0) + } else if (!isEnded) { + await sleep(RATE_LIMIT_MS) + } else { + break + } + } + + onEnd() + resolve() + } + + stream.on('data', (chunk) => { + if (!abortController.signal.aborted) { + buffer = Buffer.concat([buffer, Buffer.from(chunk)]) + } + }) + + stream.on('end', () => { + isEnded = true + }) + + stream.on('error', (error) => { + reject(error) + }) + + // Handle abort signal + abortController.signal.addEventListener('abort', () => { + if (!stream.destroyed) { + stream.destroy() + } + onStreamDestroy?.() + reject(new Error('TTS generation aborted')) + }) + + processChunks().catch(reject) +} + +const sleep = (ms: number): Promise => { + return new Promise((resolve) => setTimeout(resolve, 
ms)) +} + +export const getVoices = async (provider: string, credentialId: string, options: ICommonObject) => { + const credentialData = await getCredentialData(credentialId ?? '', options) + + switch (provider) { + case TextToSpeechType.OPENAI_TTS: + return [ + { id: 'alloy', name: 'Alloy' }, + { id: 'ash', name: 'Ash' }, + { id: 'ballad', name: 'Ballad' }, + { id: 'coral', name: 'Coral' }, + { id: 'echo', name: 'Echo' }, + { id: 'fable', name: 'Fable' }, + { id: 'nova', name: 'Nova' }, + { id: 'onyx', name: 'Onyx' }, + { id: 'sage', name: 'Sage' }, + { id: 'shimmer', name: 'Shimmer' } + ] + + case TextToSpeechType.ELEVEN_LABS_TTS: { + const client = new ElevenLabsClient({ + apiKey: credentialData.elevenLabsApiKey + }) + + const voices = await client.voices.search({ + pageSize: 100, + voiceType: 'default', + category: 'premade' + }) + + return voices.voices.map((voice) => ({ + id: voice.voiceId, + name: voice.name, + category: voice.category + })) + } + + default: + throw new Error(`Unsupported TTS provider: ${provider}`) + } +} diff --git a/packages/components/src/utils.ts b/packages/components/src/utils.ts index 957fd4992..91fc75454 100644 --- a/packages/components/src/utils.ts +++ b/packages/components/src/utils.ts @@ -4,10 +4,12 @@ import * as fs from 'fs' import * as path from 'path' import { JSDOM } from 'jsdom' import { z } from 'zod' -import { DataSource } from 'typeorm' +import { cloneDeep, omit, get } from 'lodash' +import TurndownService from 'turndown' +import { DataSource, Equal } from 'typeorm' import { ICommonObject, IDatabaseEntity, IFileUpload, IMessage, INodeData, IVariable, MessageContentImageUrl } from './Interface' +import { BaseChatModel } from '@langchain/core/language_models/chat_models' import { AES, enc } from 'crypto-js' -import { omit } from 'lodash' import { AIMessage, HumanMessage, BaseMessage } from '@langchain/core/messages' import { Document } from '@langchain/core/documents' import { getFileFromStorage } from './storageUtils' @@ 
-15,6 +17,10 @@ import { GetSecretValueCommand, SecretsManagerClient, SecretsManagerClientConfig import { customGet } from '../nodes/sequentialagents/commonUtils' import { TextSplitter } from 'langchain/text_splitter' import { DocumentLoader } from 'langchain/document_loaders/base' +import { NodeVM } from '@flowiseai/nodevm' +import { Sandbox } from '@e2b/code-interpreter' +import { secureFetch, checkDenyList, secureAxiosRequest } from './httpSecurity' +import JSON5 from 'json5' export const numberOrExpressionRegex = '^(\\d+\\.?\\d*|{{.*}})$' //return true if string consists only numbers OR expression {{}} export const notEmptyRegex = '(.|\\s)*\\S(.|\\s)*' //return true if string is not empty or blank @@ -80,7 +86,6 @@ export const availableDependencies = [ '@upstash/redis', '@zilliz/milvus2-sdk-node', 'apify-client', - 'axios', 'cheerio', 'chromadb', 'cohere-ai', @@ -98,10 +103,8 @@ export const availableDependencies = [ 'linkifyjs', 'lunary', 'mammoth', - 'moment', 'mongodb', 'mysql2', - 'node-fetch', 'node-html-markdown', 'notion-to-md', 'openai', @@ -117,6 +120,8 @@ export const availableDependencies = [ 'weaviate-ts-client' ] +const defaultAllowExternalDependencies = ['axios', 'moment', 'node-fetch'] + export const defaultAllowBuiltInDep = [ 'assert', 'buffer', @@ -418,7 +423,7 @@ async function crawl(baseURL: string, currentURL: string, pages: string[], limit if (process.env.DEBUG === 'true') console.info(`actively crawling ${currentURL}`) try { - const resp = await fetch(currentURL) + const resp = await secureFetch(currentURL) if (resp.status > 399) { if (process.env.DEBUG === 'true') console.error(`error in fetch with status code: ${resp.status}, on page: ${currentURL}`) @@ -449,6 +454,8 @@ async function crawl(baseURL: string, currentURL: string, pages: string[], limit * @returns {Promise} */ export async function webCrawl(stringURL: string, limit: number): Promise { + await checkDenyList(stringURL) + const URLObj = new URL(stringURL) const modifyURL = 
stringURL.slice(-1) === '/' ? stringURL.slice(0, -1) : stringURL return await crawl(URLObj.protocol + '//' + URLObj.hostname, modifyURL, [], limit) @@ -472,7 +479,7 @@ export async function xmlScrape(currentURL: string, limit: number): Promise 399) { if (process.env.DEBUG === 'true') console.error(`error in fetch with status code: ${resp.status}, on page: ${currentURL}`) @@ -706,7 +713,7 @@ export const getUserHome = (): string => { * @param {IChatMessage[]} chatmessages * @returns {BaseMessage[]} */ -export const mapChatMessageToBaseMessage = async (chatmessages: any[] = []): Promise => { +export const mapChatMessageToBaseMessage = async (chatmessages: any[] = [], orgId: string): Promise => { const chatHistory = [] for (const message of chatmessages) { @@ -722,7 +729,7 @@ export const mapChatMessageToBaseMessage = async (chatmessages: any[] = []): Pro const imageContents: MessageContentImageUrl[] = [] for (const upload of uploads) { if (upload.type === 'stored-file' && upload.mime.startsWith('image/')) { - const fileData = await getFileFromStorage(upload.name, message.chatflowid, message.chatId) + const fileData = await getFileFromStorage(upload.name, orgId, message.chatflowid, message.chatId) // as the image is stored in the server, read the file and convert it to base64 const bf = 'data:' + upload.mime + ';base64,' + fileData.toString('base64') @@ -746,7 +753,8 @@ export const mapChatMessageToBaseMessage = async (chatmessages: any[] = []): Pro const options = { retrieveAttachmentChatId: true, chatflowid: message.chatflowid, - chatId: message.chatId + chatId: message.chatId, + orgId } let fileInputFieldFromMimeType = 'txtFile' fileInputFieldFromMimeType = mapMimeTypeToInputField(upload.mime) @@ -756,7 +764,7 @@ export const mapChatMessageToBaseMessage = async (chatmessages: any[] = []): Pro } } const documents: string = await fileLoaderNodeInstance.init(nodeData, '', options) - messageWithFileUploads += `${documents}\n\n` + messageWithFileUploads += 
`${handleEscapeCharacters(documents, true)}\n\n` } } const messageContent = messageWithFileUploads ? `${messageWithFileUploads}\n\n${message.content}` : message.content @@ -935,8 +943,19 @@ export const convertMultiOptionsToStringArray = (inputString: string): string[] * @param {IDatabaseEntity} databaseEntities * @param {INodeData} nodeData */ -export const getVars = async (appDataSource: DataSource, databaseEntities: IDatabaseEntity, nodeData: INodeData) => { - const variables = ((await appDataSource.getRepository(databaseEntities['Variable']).find()) as IVariable[]) ?? [] +export const getVars = async ( + appDataSource: DataSource, + databaseEntities: IDatabaseEntity, + nodeData: INodeData, + options: ICommonObject +) => { + if (!options.workspaceId) { + return [] + } + const variables = + ((await appDataSource + .getRepository(databaseEntities['Variable']) + .findBy({ workspaceId: Equal(options.workspaceId) })) as IVariable[]) ?? [] // override variables defined in overrideConfig // nodeData.inputs.vars is an Object, check each property and override the variable @@ -1057,7 +1076,17 @@ export const mapMimeTypeToInputField = (mimeType: string) => { case 'text/jsonl': return 'jsonlinesFile' case 'application/vnd.openxmlformats-officedocument.wordprocessingml.document': + case 'application/msword': { return 'docxFile' + } + case 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet': + case 'application/vnd.ms-excel': { + return 'excelFile' + } + case 'application/vnd.openxmlformats-officedocument.presentationml.presentation': + case 'application/vnd.ms-powerpoint': { + return 'powerpointFile' + } case 'application/vnd.yaml': case 'application/x-yaml': case 'text/vnd.yaml': @@ -1078,6 +1107,19 @@ export const mapMimeTypeToExt = (mimeType: string) => { switch (mimeType) { case 'text/plain': return 'txt' + case 'text/html': + return 'html' + case 'text/css': + return 'css' + case 'text/javascript': + case 'application/javascript': + return 'js' + case 
'text/xml': + case 'application/xml': + return 'xml' + case 'text/markdown': + case 'text/x-markdown': + return 'md' case 'application/pdf': return 'pdf' case 'application/json': @@ -1096,6 +1138,10 @@ export const mapMimeTypeToExt = (mimeType: string) => { return 'xls' case 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet': return 'xlsx' + case 'application/vnd.ms-powerpoint': + return 'ppt' + case 'application/vnd.openxmlformats-officedocument.presentationml.presentation': + return 'pptx' default: return '' } @@ -1206,3 +1252,850 @@ export const handleDocumentLoaderDocuments = async (loader: DocumentLoader, text return docs } + +/** + * Normalize special characters in key to be used in vector store + * @param str - Key to normalize + * @returns Normalized key + */ +export const normalizeSpecialChars = (str: string) => { + return str.replace(/[^a-zA-Z0-9_]/g, '_') +} + +/** + * recursively normalize object keys + * @param data - Object to normalize + * @returns Normalized object + */ +export const normalizeKeysRecursively = (data: any): any => { + if (Array.isArray(data)) { + return data.map(normalizeKeysRecursively) + } + + if (data !== null && typeof data === 'object') { + return Object.entries(data).reduce((acc, [key, value]) => { + const newKey = normalizeSpecialChars(key) + acc[newKey] = normalizeKeysRecursively(value) + return acc + }, {} as Record) + } + return data +} + +/** + * Check if OAuth2 token is expired and refresh if needed + * @param {string} credentialId + * @param {ICommonObject} credentialData + * @param {ICommonObject} options + * @param {number} bufferTimeMs - Buffer time in milliseconds before expiry (default: 5 minutes) + * @returns {Promise} + */ +export const refreshOAuth2Token = async ( + credentialId: string, + credentialData: ICommonObject, + options: ICommonObject, + bufferTimeMs: number = 5 * 60 * 1000 +): Promise => { + // Check if token is expired and refresh if needed + if (credentialData.expires_at) { + const 
expiryTime = new Date(credentialData.expires_at) + const currentTime = new Date() + + if (currentTime.getTime() > expiryTime.getTime() - bufferTimeMs) { + if (!credentialData.refresh_token) { + throw new Error('Access token is expired and no refresh token is available. Please re-authorize the credential.') + } + + try { + // Import fetch dynamically to avoid issues + const fetch = (await import('node-fetch')).default + + // Call the refresh API endpoint + const refreshResponse = await fetch( + `${options.baseURL || 'http://localhost:3000'}/api/v1/oauth2-credential/refresh/${credentialId}`, + { + method: 'POST', + headers: { + 'Content-Type': 'application/json' + } + } + ) + + if (!refreshResponse.ok) { + const errorData = await refreshResponse.text() + throw new Error(`Failed to refresh token: ${refreshResponse.status} ${refreshResponse.statusText} - ${errorData}`) + } + + await refreshResponse.json() + + // Get the updated credential data + const updatedCredentialData = await getCredentialData(credentialId, options) + + return updatedCredentialData + } catch (error) { + console.error('Failed to refresh access token:', error) + throw new Error( + `Failed to refresh access token: ${ + error instanceof Error ? error.message : 'Unknown error' + }. 
Please re-authorize the credential.` + ) + } + } + } + + // Token is not expired, return original data + return credentialData +} + +export const stripHTMLFromToolInput = (input: string) => { + const turndownService = new TurndownService() + let cleanedInput = turndownService.turndown(input) + // After conversion, replace any escaped underscores and square brackets with regular unescaped ones + cleanedInput = cleanedInput.replace(/\\([_[\]])/g, '$1') + return cleanedInput +} + +// Helper function to convert require statements to ESM imports +const convertRequireToImport = (requireLine: string): string | null => { + // Remove leading/trailing whitespace and get the indentation + const indent = requireLine.match(/^(\s*)/)?.[1] || '' + const trimmed = requireLine.trim() + + // Match patterns like: const/let/var name = require('module') + const defaultRequireMatch = trimmed.match(/^(const|let|var)\s+(\w+)\s*=\s*require\s*\(\s*['"`]([^'"`]+)['"`]\s*\)/) + if (defaultRequireMatch) { + const [, , varName, moduleName] = defaultRequireMatch + return `${indent}import ${varName} from '${moduleName}';` + } + + // Match patterns like: const { name1, name2 } = require('module') + const destructureMatch = trimmed.match(/^(const|let|var)\s+\{\s*([^}]+)\s*\}\s*=\s*require\s*\(\s*['"`]([^'"`]+)['"`]\s*\)/) + if (destructureMatch) { + const [, , destructuredVars, moduleName] = destructureMatch + return `${indent}import { ${destructuredVars.trim()} } from '${moduleName}';` + } + + // Match patterns like: const name = require('module').property + const propertyMatch = trimmed.match(/^(const|let|var)\s+(\w+)\s*=\s*require\s*\(\s*['"`]([^'"`]+)['"`]\s*\)\.(\w+)/) + if (propertyMatch) { + const [, , varName, moduleName, property] = propertyMatch + return `${indent}import { ${property} as ${varName} } from '${moduleName}';` + } + + // If no pattern matches, return null to skip conversion + return null +} + +/** + * Parse output if it's a stringified JSON or array + * @param {any} output - 
The output to parse + * @returns {any} - The parsed output or original output if not parseable + */ +const parseOutput = (output: any): any => { + // If output is not a string, return as-is + if (typeof output !== 'string') { + return output + } + + // Trim whitespace + const trimmedOutput = output.trim() + + // Check if it's an empty string + if (!trimmedOutput) { + return output + } + + // Check if it looks like JSON (starts with { or [) + if ((trimmedOutput.startsWith('{') && trimmedOutput.endsWith('}')) || (trimmedOutput.startsWith('[') && trimmedOutput.endsWith(']'))) { + try { + const parsedOutput = parseJsonBody(trimmedOutput) + return parsedOutput + } catch (e) { + return output + } + } + + // Return the original string if it doesn't look like JSON + return output +} + +/** + * Execute JavaScript code using either Sandbox or NodeVM + * @param {string} code - The JavaScript code to execute + * @param {ICommonObject} sandbox - The sandbox object with variables + * @param {ICommonObject} options - Execution options + * @returns {Promise} - The execution result + */ +export const executeJavaScriptCode = async ( + code: string, + sandbox: ICommonObject, + options: { + timeout?: number + useSandbox?: boolean + libraries?: string[] + streamOutput?: (output: string) => void + nodeVMOptions?: ICommonObject + } = {} +): Promise => { + const { timeout = 300000, useSandbox = true, streamOutput, libraries = [], nodeVMOptions = {} } = options + const shouldUseSandbox = useSandbox && process.env.E2B_APIKEY + let timeoutMs = timeout + if (process.env.SANDBOX_TIMEOUT) { + timeoutMs = parseInt(process.env.SANDBOX_TIMEOUT, 10) + } + + if (shouldUseSandbox) { + try { + const variableDeclarations = [] + + if (sandbox['$vars']) { + variableDeclarations.push(`const $vars = ${JSON.stringify(sandbox['$vars'])};`) + } + + if (sandbox['$flow']) { + variableDeclarations.push(`const $flow = ${JSON.stringify(sandbox['$flow'])};`) + } + + // Add other sandbox variables + for (const [key, 
value] of Object.entries(sandbox)) { + if ( + key !== '$vars' && + key !== '$flow' && + key !== 'util' && + key !== 'Symbol' && + key !== 'child_process' && + key !== 'fs' && + key !== 'process' + ) { + variableDeclarations.push(`const ${key} = ${JSON.stringify(value)};`) + } + } + + // Handle import statements properly - they must be at the top + const lines = code.split('\n') + const importLines = [] + const otherLines = [] + + for (const line of lines) { + const trimmedLine = line.trim() + + // Skip node-fetch imports since Node.js has built-in fetch + if (trimmedLine.includes('node-fetch') || trimmedLine.includes("'fetch'") || trimmedLine.includes('"fetch"')) { + continue // Skip this line entirely + } + + // Check for existing ES6 imports and exports + if (trimmedLine.startsWith('import ') || trimmedLine.startsWith('export ')) { + importLines.push(line) + } + // Check for CommonJS require statements and convert them to ESM imports + else if (/^(const|let|var)\s+.*=\s*require\s*\(/.test(trimmedLine)) { + const convertedImport = convertRequireToImport(trimmedLine) + if (convertedImport) { + importLines.push(convertedImport) + } + } else { + otherLines.push(line) + } + } + + const sbx = await Sandbox.create({ apiKey: process.env.E2B_APIKEY, timeoutMs }) + + // Determine which libraries to install + const librariesToInstall = new Set(libraries) + + // Auto-detect required libraries from code + // Extract required modules from import/require statements + const importRegex = /(?:import\s+.*?\s+from\s+['"]([^'"]+)['"]|require\s*\(\s*['"]([^'"]+)['"]\s*\))/g + let match + while ((match = importRegex.exec(code)) !== null) { + const moduleName = match[1] || match[2] + // Extract base module name (e.g., 'typeorm' from 'typeorm/something') + const baseModuleName = moduleName.split('/')[0] + librariesToInstall.add(baseModuleName) + } + + // Install libraries + for (const library of librariesToInstall) { + // Validate library name to prevent command injection. 
+ const validPackageNameRegex = /^(@[a-z0-9-~][a-z0-9-._~]*\/)?[a-z0-9-~][a-z0-9-._~]*$/ + if (validPackageNameRegex.test(library)) { + await sbx.commands.run(`npm install ${library}`) + } else { + console.warn(`[Sandbox] Skipping installation of invalid module: ${library}`) + } + } + + // Separate imports from the rest of the code for proper ES6 module structure + const codeWithImports = [ + ...importLines, + `module.exports = async function() {`, + ...variableDeclarations, + ...otherLines, + `}()` + ].join('\n') + + const execution = await sbx.runCode(codeWithImports, { language: 'js' }) + + let output = '' + + if (execution.text) output = execution.text + if (!execution.text && execution.logs.stdout.length) output = execution.logs.stdout.join('\n') + + if (execution.error) { + throw new Error(`${execution.error.name}: ${execution.error.value}`) + } + + if (execution.logs.stderr.length) { + throw new Error(execution.logs.stderr.join('\n')) + } + + // Stream output if streaming function provided + if (streamOutput && output) { + streamOutput(output) + } + + // Clean up sandbox + sbx.kill() + + return parseOutput(output) + } catch (e) { + throw new Error(`Sandbox Execution Error: ${e}`) + } + } else { + const builtinDeps = process.env.TOOL_FUNCTION_BUILTIN_DEP + ? defaultAllowBuiltInDep.concat(process.env.TOOL_FUNCTION_BUILTIN_DEP.split(',')) + : defaultAllowBuiltInDep + const externalDeps = process.env.TOOL_FUNCTION_EXTERNAL_DEP ? process.env.TOOL_FUNCTION_EXTERNAL_DEP.split(',') : [] + let deps = process.env.ALLOW_BUILTIN_DEP === 'true' ? 
availableDependencies.concat(externalDeps) : externalDeps + deps.push(...defaultAllowExternalDependencies) + deps = [...new Set(deps)] + + // Create secure wrappers for HTTP libraries + const secureWrappers: ICommonObject = {} + + // Axios + const secureAxiosWrapper = async (config: any) => { + return await secureAxiosRequest(config) + } + secureAxiosWrapper.get = async (url: string, config: any = {}) => secureAxiosWrapper({ ...config, method: 'GET', url }) + secureAxiosWrapper.post = async (url: string, data: any, config: any = {}) => + secureAxiosWrapper({ ...config, method: 'POST', url, data }) + secureAxiosWrapper.put = async (url: string, data: any, config: any = {}) => + secureAxiosWrapper({ ...config, method: 'PUT', url, data }) + secureAxiosWrapper.delete = async (url: string, config: any = {}) => secureAxiosWrapper({ ...config, method: 'DELETE', url }) + secureAxiosWrapper.patch = async (url: string, data: any, config: any = {}) => + secureAxiosWrapper({ ...config, method: 'PATCH', url, data }) + + secureWrappers['axios'] = secureAxiosWrapper + + // Node Fetch + const secureNodeFetch = async (url: string, options: any = {}) => { + return await secureFetch(url, options) + } + secureWrappers['node-fetch'] = secureNodeFetch + + const defaultNodeVMOptions: any = { + console: 'inherit', + sandbox, + require: { + external: { + modules: deps, + transitive: false // Prevent transitive dependencies + }, + builtin: builtinDeps, + mock: secureWrappers // Replace HTTP libraries with secure wrappers + }, + eval: false, + wasm: false, + timeout: timeoutMs + } + + // Merge with custom nodeVMOptions if provided + const finalNodeVMOptions = { ...defaultNodeVMOptions, ...nodeVMOptions } + + const vm = new NodeVM(finalNodeVMOptions) + + try { + const response = await vm.run(`module.exports = async function() {${code}}()`, __dirname) + + let finalOutput = response + + // Stream output if streaming function provided + if (streamOutput && finalOutput) { + let streamOutputString 
= finalOutput + if (typeof response === 'object') { + streamOutputString = JSON.stringify(finalOutput, null, 2) + } + streamOutput(streamOutputString) + } + + return parseOutput(finalOutput) + } catch (e) { + throw new Error(`NodeVM Execution Error: ${e}`) + } + } +} + +/** + * Create a standard sandbox object for code execution + * @param {string} input - The input string + * @param {ICommonObject} variables - Variables from getVars + * @param {ICommonObject} flow - Flow object with chatflowId, sessionId, etc. + * @param {ICommonObject} additionalSandbox - Additional sandbox variables + * @returns {ICommonObject} - The sandbox object + */ +export const createCodeExecutionSandbox = ( + input: string, + variables: IVariable[], + flow: ICommonObject, + additionalSandbox: ICommonObject = {} +): ICommonObject => { + const sandbox: ICommonObject = { + $input: input, + util: undefined, + Symbol: undefined, + child_process: undefined, + fs: undefined, + process: undefined, + ...additionalSandbox + } + + sandbox['$vars'] = prepareSandboxVars(variables) + sandbox['$flow'] = flow + + return sandbox +} + +/** + * Process template variables in state object, replacing {{ output }} and {{ output.property }} patterns + * @param {ICommonObject} state - The state object to process + * @param {any} finalOutput - The output value to substitute + * @returns {ICommonObject} - The processed state object + */ +export const processTemplateVariables = (state: ICommonObject, finalOutput: any): ICommonObject => { + if (!state || Object.keys(state).length === 0) { + return state + } + + const newState = { ...state } + + for (const key in newState) { + const stateValue = newState[key].toString() + if (stateValue.includes('{{ output') || stateValue.includes('{{output')) { + // Handle simple output replacement (with or without spaces) + if (stateValue === '{{ output }}' || stateValue === '{{output}}') { + newState[key] = finalOutput + continue + } + + // Handle JSON path expressions like {{ 
output.updated }} or {{output.updated}} + // eslint-disable-next-line + const match = stateValue.match(/\{\{\s*output\.([\w\.]+)\s*\}\}/) + if (match) { + try { + // Parse the response if it's JSON + const jsonResponse = typeof finalOutput === 'string' ? JSON.parse(finalOutput) : finalOutput + // Get the value using lodash get + const path = match[1] + const value = get(jsonResponse, path) + newState[key] = value ?? stateValue // Fall back to original if path not found + } catch (e) { + // If JSON parsing fails, keep original template + newState[key] = stateValue + } + } else { + // Handle simple {{ output }} replacement for backward compatibility + newState[key] = newState[key].replaceAll('{{ output }}', finalOutput) + } + } + } + + return newState +} + +/** + * Parse JSON body with comprehensive error handling and cleanup + * @param {string} body - The JSON string to parse + * @returns {any} - The parsed JSON object + * @throws {Error} - Detailed error message with suggestions for common JSON issues + */ +export const parseJsonBody = (body: string): any => { + try { + // First try to parse as-is with JSON5 (which handles more cases than standard JSON) + return JSON5.parse(body) + } catch (error) { + try { + // If that fails, try to clean up common issues + let cleanedBody = body + + // 1. Remove unnecessary backslash escapes for square brackets and braces + // eslint-disable-next-line + cleanedBody = cleanedBody.replace(/\\(?=[\[\]{}])/g, '') + + // 2. Fix single quotes to double quotes (but preserve quotes inside strings) + cleanedBody = cleanedBody.replace(/'/g, '"') + + // 3. Remove trailing commas before closing brackets/braces + cleanedBody = cleanedBody.replace(/,(\s*[}\]])/g, '$1') + + // 4. 
Remove comments (// and /* */) + cleanedBody = cleanedBody + .replace(/\/\/.*$/gm, '') // Remove single-line comments + .replace(/\/\*[\s\S]*?\*\//g, '') // Remove multi-line comments + + return JSON5.parse(cleanedBody) + } catch (secondError) { + try { + // 3rd attempt: try with standard JSON.parse on original body + return JSON.parse(body) + } catch (thirdError) { + try { + // 4th attempt: try with standard JSON.parse on cleaned body + const finalCleanedBody = body + // eslint-disable-next-line + .replace(/\\(?=[\[\]{}])/g, '') // Basic escape cleanup + .replace(/,(\s*[}\]])/g, '$1') // Remove trailing commas + .trim() + + return JSON.parse(finalCleanedBody) + } catch (fourthError) { + // Provide comprehensive error message with suggestions + const suggestions = [ + 'โ€ข Ensure all strings are enclosed in double quotes', + 'โ€ข Remove trailing commas', + 'โ€ข Remove comments (// or /* */)', + 'โ€ข Escape special characters properly (\\n for newlines, \\" for quotes)', + 'โ€ข Use double quotes instead of single quotes', + 'โ€ข Remove unnecessary backslashes before brackets [ ] { }' + ] + + throw new Error( + `Invalid JSON format in body. Original error: ${error.message}. ` + + `After cleanup attempts: ${secondError.message}. 3rd attempt: ${thirdError.message}. Final attempt: ${fourthError.message}.\n\n` + + `Common fixes:\n${suggestions.join('\n')}\n\n` + + `Received body: ${body.substring(0, 200)}${body.length > 200 ? '...' 
: ''}` + ) + } + } + } + } +} + +/** + * Parse a value against a Zod schema with automatic type conversion for common type mismatches + * @param schema - The Zod schema to parse against + * @param arg - The value to parse + * @param maxDepth - Maximum recursion depth to prevent infinite loops (default: 10) + * @returns The parsed value + * @throws Error if parsing fails after attempting type conversions + */ +export async function parseWithTypeConversion(schema: T, arg: unknown, maxDepth: number = 10): Promise> { + // Safety check: prevent infinite recursion + if (maxDepth <= 0) { + throw new Error('Maximum recursion depth reached in parseWithTypeConversion') + } + + try { + return await schema.parseAsync(arg) + } catch (e) { + // Check if it's a ZodError and try to fix type mismatches + if (z.ZodError && e instanceof z.ZodError) { + const zodError = e as z.ZodError + // Deep clone the arg to avoid mutating the original + const modifiedArg = typeof arg === 'object' && arg !== null ? cloneDeep(arg) : arg + let hasModification = false + + // Helper function to set a value at a nested path + const setValueAtPath = (obj: any, path: (string | number)[], value: any): void => { + let current = obj + for (let i = 0; i < path.length - 1; i++) { + const key = path[i] + if (current && typeof current === 'object' && key in current) { + current = current[key] + } else { + return // Path doesn't exist + } + } + if (current !== undefined && current !== null) { + const finalKey = path[path.length - 1] + current[finalKey] = value + } + } + + // Helper function to get a value at a nested path + const getValueAtPath = (obj: any, path: (string | number)[]): any => { + let current = obj + for (const key of path) { + if (current && typeof current === 'object' && key in current) { + current = current[key] + } else { + return undefined + } + } + return current + } + + // Helper function to convert value to expected type + const convertValue = (value: any, expected: string, received: 
string): any => { + // Expected string + if (expected === 'string') { + if (received === 'object' || received === 'array') { + return JSON.stringify(value) + } + if (received === 'number' || received === 'boolean') { + return String(value) + } + } + // Expected number + else if (expected === 'number') { + if (received === 'string') { + const parsed = parseFloat(value) + if (!isNaN(parsed)) { + return parsed + } + } + if (received === 'boolean') { + return value ? 1 : 0 + } + } + // Expected boolean + else if (expected === 'boolean') { + if (received === 'string') { + const lower = String(value).toLowerCase().trim() + if (lower === 'true' || lower === '1' || lower === 'yes') { + return true + } + if (lower === 'false' || lower === '0' || lower === 'no') { + return false + } + } + if (received === 'number') { + return value !== 0 + } + } + // Expected object + else if (expected === 'object') { + if (received === 'string') { + try { + const parsed = JSON.parse(value) + if (typeof parsed === 'object' && parsed !== null && !Array.isArray(parsed)) { + return parsed + } + } catch { + // Invalid JSON, return undefined to skip conversion + } + } + } + // Expected array + else if (expected === 'array') { + if (received === 'string') { + try { + const parsed = JSON.parse(value) + if (Array.isArray(parsed)) { + return parsed + } + } catch { + // Invalid JSON, return undefined to skip conversion + } + } + if (received === 'object' && value !== null) { + // Convert object to array (e.g., {0: 'a', 1: 'b'} -> ['a', 'b']) + // Only if it looks like an array-like object + const keys = Object.keys(value) + const numericKeys = keys.filter((k) => /^\d+$/.test(k)) + if (numericKeys.length === keys.length) { + return numericKeys.map((k) => value[k]) + } + } + } + return undefined // No conversion possible + } + + // Process each issue in the error + for (const issue of zodError.issues) { + // Handle invalid_type errors (type mismatches) + if (issue.code === 'invalid_type' && 
issue.path.length > 0) { + try { + const valueAtPath = getValueAtPath(modifiedArg, issue.path) + if (valueAtPath !== undefined) { + const convertedValue = convertValue(valueAtPath, issue.expected, issue.received) + if (convertedValue !== undefined) { + setValueAtPath(modifiedArg, issue.path, convertedValue) + hasModification = true + } + } + } catch (pathError) { + console.error('Error processing path in Zod error', pathError) + } + } + } + + // If we modified the arg, recursively call parseWithTypeConversion + // This allows newly surfaced nested errors to also get conversion treatment + // Decrement maxDepth to prevent infinite recursion + if (hasModification) { + return await parseWithTypeConversion(schema, modifiedArg, maxDepth - 1) + } + } + // Re-throw the original error if not a ZodError or no conversion possible + throw e + } +} + +/** + * Configures structured output for the LLM using Zod schema + * @param {BaseChatModel} llmNodeInstance - The LLM instance to configure + * @param {any[]} structuredOutput - Array of structured output schema definitions + * @returns {BaseChatModel} - The configured LLM instance + */ +export const configureStructuredOutput = (llmNodeInstance: BaseChatModel, structuredOutput: any[]): BaseChatModel => { + try { + const zodObj: ICommonObject = {} + for (const sch of structuredOutput) { + if (sch.type === 'string') { + zodObj[sch.key] = z.string().describe(sch.description || '') + } else if (sch.type === 'stringArray') { + zodObj[sch.key] = z.array(z.string()).describe(sch.description || '') + } else if (sch.type === 'number') { + zodObj[sch.key] = z.number().describe(sch.description || '') + } else if (sch.type === 'boolean') { + zodObj[sch.key] = z.boolean().describe(sch.description || '') + } else if (sch.type === 'enum') { + const enumValues = sch.enumValues?.split(',').map((item: string) => item.trim()) || [] + zodObj[sch.key] = z + .enum(enumValues.length ? 
(enumValues as [string, ...string[]]) : ['default']) + .describe(sch.description || '') + } else if (sch.type === 'jsonArray') { + const jsonSchema = sch.jsonSchema + if (jsonSchema) { + try { + // Parse the JSON schema + const schemaObj = JSON.parse(jsonSchema) + + // Create a Zod schema from the JSON schema + const itemSchema = createZodSchemaFromJSON(schemaObj) + + // Create an array schema of the item schema + zodObj[sch.key] = z.array(itemSchema).describe(sch.description || '') + } catch (err) { + console.error(`Error parsing JSON schema for ${sch.key}:`, err) + // Fallback to generic array of records + zodObj[sch.key] = z.array(z.record(z.any())).describe(sch.description || '') + } + } else { + // If no schema provided, use generic array of records + zodObj[sch.key] = z.array(z.record(z.any())).describe(sch.description || '') + } + } + } + const structuredOutputSchema = z.object(zodObj) + + // @ts-ignore + return llmNodeInstance.withStructuredOutput(structuredOutputSchema) + } catch (exception) { + console.error(exception) + return llmNodeInstance + } +} + +/** + * Creates a Zod schema from a JSON schema object + * @param {any} jsonSchema - The JSON schema object + * @returns {z.ZodTypeAny} - A Zod schema + */ +export const createZodSchemaFromJSON = (jsonSchema: any): z.ZodTypeAny => { + // If the schema is an object with properties, create an object schema + if (typeof jsonSchema === 'object' && jsonSchema !== null) { + const schemaObj: Record = {} + + // Process each property in the schema + for (const [key, value] of Object.entries(jsonSchema)) { + if (value === null) { + // Handle null values + schemaObj[key] = z.null() + } else if (typeof value === 'object' && !Array.isArray(value)) { + // Check if the property has a type definition + if ('type' in value) { + const type = value.type as string + const description = ('description' in value ? 
(value.description as string) : '') || '' + + // Create the appropriate Zod type based on the type property + if (type === 'string') { + schemaObj[key] = z.string().describe(description) + } else if (type === 'number') { + schemaObj[key] = z.number().describe(description) + } else if (type === 'boolean') { + schemaObj[key] = z.boolean().describe(description) + } else if (type === 'array') { + // If it's an array type, check if items is defined + if ('items' in value && value.items) { + const itemSchema = createZodSchemaFromJSON(value.items) + schemaObj[key] = z.array(itemSchema).describe(description) + } else { + // Default to array of any if items not specified + schemaObj[key] = z.array(z.any()).describe(description) + } + } else if (type === 'object') { + // If it's an object type, check if properties is defined + if ('properties' in value && value.properties) { + const nestedSchema = createZodSchemaFromJSON(value.properties) + schemaObj[key] = nestedSchema.describe(description) + } else { + // Default to record of any if properties not specified + schemaObj[key] = z.record(z.any()).describe(description) + } + } else { + // Default to any for unknown types + schemaObj[key] = z.any().describe(description) + } + + // Check if the property is optional + if ('optional' in value && value.optional === true) { + schemaObj[key] = schemaObj[key].optional() + } + } else if (Array.isArray(value)) { + // Array values without a type property + if (value.length > 0) { + // If the array has items, recursively create a schema for the first item + const itemSchema = createZodSchemaFromJSON(value[0]) + schemaObj[key] = z.array(itemSchema) + } else { + // Empty array, allow any array + schemaObj[key] = z.array(z.any()) + } + } else { + // It's a nested object without a type property, recursively create schema + schemaObj[key] = createZodSchemaFromJSON(value) + } + } else if (Array.isArray(value)) { + // Array values + if (value.length > 0) { + // If the array has items, 
recursively create a schema for the first item + const itemSchema = createZodSchemaFromJSON(value[0]) + schemaObj[key] = z.array(itemSchema) + } else { + // Empty array, allow any array + schemaObj[key] = z.array(z.any()) + } + } else { + // For primitive values (which shouldn't be in the schema directly) + // Use the corresponding Zod type + if (typeof value === 'string') { + schemaObj[key] = z.string() + } else if (typeof value === 'number') { + schemaObj[key] = z.number() + } else if (typeof value === 'boolean') { + schemaObj[key] = z.boolean() + } else { + schemaObj[key] = z.any() + } + } + } + + return z.object(schemaObj) + } + + // Fallback to any for unknown types + return z.any() +} diff --git a/packages/components/src/validator.ts b/packages/components/src/validator.ts index 4948165eb..5a72144f0 100644 --- a/packages/components/src/validator.ts +++ b/packages/components/src/validator.ts @@ -9,6 +9,20 @@ export const isValidUUID = (uuid: string): boolean => { return uuidV4Pattern.test(uuid) } +/** + * Validates if a string is a valid URL + * @param {string} url The string to validate + * @returns {boolean} True if valid URL, false otherwise + */ +export const isValidURL = (url: string): boolean => { + try { + new URL(url) + return true + } catch { + return false + } +} + /** * Validates if a string contains path traversal attempts * @param {string} path The string to validate @@ -27,3 +41,31 @@ export const isPathTraversal = (path: string): boolean => { return dangerousPatterns.some((pattern) => path.toLowerCase().includes(pattern)) } + +/** + * Enhanced path validation for workspace-scoped file operations + * @param {string} filePath The file path to validate + * @returns {boolean} True if path traversal detected, false otherwise + */ +export const isUnsafeFilePath = (filePath: string): boolean => { + if (!filePath || typeof filePath !== 'string') { + return true + } + + // Check for path traversal patterns + const dangerousPatterns = [ + /\.\./, // 
Directory traversal (..) + /%2e%2e/i, // URL encoded .. + /%2f/i, // URL encoded / + /%5c/i, // URL encoded \ + /\0/, // Null bytes + // eslint-disable-next-line no-control-regex + /[\x00-\x1f]/, // Control characters + /^\/[^/]/, // Absolute Unix paths (starting with /) + /^[a-zA-Z]:\\/, // Absolute Windows paths (C:\) + /^\\\\[^\\]/, // UNC paths (\\server\) + /^\\\\\?\\/ // Extended-length paths (\\?\) + ] + + return dangerousPatterns.some((pattern) => pattern.test(filePath)) +} diff --git a/packages/components/tsconfig.json b/packages/components/tsconfig.json index bda815f9a..edac0ceea 100644 --- a/packages/components/tsconfig.json +++ b/packages/components/tsconfig.json @@ -17,5 +17,5 @@ "module": "commonjs" }, "include": ["src", "nodes", "credentials"], - "exclude": ["gulpfile.ts", "node_modules", "dist"] + "exclude": ["gulpfile.ts", "node_modules", "dist", "**/*.test.ts", "**/*.test.js", "**/*.spec.ts", "**/*.spec.js"] } diff --git a/packages/server/.env.example b/packages/server/.env.example index 54db59268..282e4cd33 100644 --- a/packages/server/.env.example +++ b/packages/server/.env.example @@ -1,19 +1,10 @@ PORT=3000 -# APIKEY_STORAGE_TYPE=json (json | db) -# APIKEY_PATH=/your_api_key_path/.flowise +# APIKEY_PATH=/your_apikey_path/.flowise # (will be deprecated by end of 2025) -# SECRETKEY_STORAGE_TYPE=local #(local | aws) -# SECRETKEY_PATH=/your_api_key_path/.flowise -# FLOWISE_SECRETKEY_OVERWRITE=myencryptionkey -# SECRETKEY_AWS_ACCESS_KEY= -# SECRETKEY_AWS_SECRET_KEY= -# SECRETKEY_AWS_REGION=us-west-2 -# SECRETKEY_AWS_NAME=FlowiseEncryptionKey - -# NUMBER_OF_PROXIES= 1 -# CORS_ORIGINS=* -# IFRAME_ORIGINS=* +############################################################################################################ +############################################## DATABASE #################################################### +############################################################################################################ # 
DATABASE_PATH=/your_database_path/.flowise # DATABASE_TYPE=postgres @@ -23,28 +14,42 @@ PORT=3000 # DATABASE_USER=root # DATABASE_PASSWORD=mypassword # DATABASE_SSL=true +# DATABASE_REJECT_UNAUTHORIZED=true # DATABASE_SSL_KEY_BASE64= -# FLOWISE_USERNAME=user -# FLOWISE_PASSWORD=1234 -# FLOWISE_FILE_SIZE_LIMIT=50mb + +############################################################################################################ +############################################## SECRET KEYS ################################################# +############################################################################################################ + +# SECRETKEY_STORAGE_TYPE=local #(local | aws) +# SECRETKEY_PATH=/your_secret_path/.flowise +# FLOWISE_SECRETKEY_OVERWRITE=myencryptionkey # (if you want to overwrite the secret key) +# SECRETKEY_AWS_ACCESS_KEY= +# SECRETKEY_AWS_SECRET_KEY= +# SECRETKEY_AWS_REGION=us-west-2 +# SECRETKEY_AWS_NAME=FlowiseEncryptionKey + + +############################################################################################################ +############################################## LOGGING ##################################################### +############################################################################################################ # DEBUG=true # LOG_PATH=/your_log_path/.flowise/logs -# LOG_LEVEL=info (error | warn | info | verbose | debug) +# LOG_LEVEL=info #(error | warn | info | verbose | debug) +# LOG_SANITIZE_BODY_FIELDS=password,pwd,pass,secret,token,apikey,api_key,accesstoken,access_token,refreshtoken,refresh_token,clientsecret,client_secret,privatekey,private_key,secretkey,secret_key,auth,authorization,credential,credentials +# LOG_SANITIZE_HEADER_FIELDS=authorization,x-api-key,x-auth-token,cookie # TOOL_FUNCTION_BUILTIN_DEP=crypto,fs # TOOL_FUNCTION_EXTERNAL_DEP=moment,lodash +# ALLOW_BUILTIN_DEP=false -# LANGCHAIN_TRACING_V2=true -# LANGCHAIN_ENDPOINT=https://api.smith.langchain.com -# 
LANGCHAIN_API_KEY=your_api_key -# LANGCHAIN_PROJECT=your_project -# Uncomment the following line to enable model list config, load the list of models from your local config file -# see https://raw.githubusercontent.com/FlowiseAI/Flowise/main/packages/components/models.json for the format -# MODEL_LIST_CONFIG_JSON=/your_model_list_config_file_path +############################################################################################################ +############################################## STORAGE ##################################################### +############################################################################################################ -# STORAGE_TYPE=local (local | s3) +# STORAGE_TYPE=local (local | s3 | gcs) # BLOB_STORAGE_PATH=/your_storage_path/.flowise/storage # S3_STORAGE_BUCKET_NAME=flowise # S3_STORAGE_ACCESS_KEY_ID= @@ -57,12 +62,70 @@ PORT=3000 # GOOGLE_CLOUD_STORAGE_BUCKET_NAME= # GOOGLE_CLOUD_UNIFORM_BUCKET_ACCESS=true -# SHOW_COMMUNITY_NODES=true -# DISABLED_NODES=bufferMemory,chatOpenAI (comma separated list of node names to disable) -###################### -# METRICS COLLECTION -####################### +############################################################################################################ +############################################## SETTINGS #################################################### +############################################################################################################ + +# NUMBER_OF_PROXIES= 1 +# CORS_ORIGINS=* +# IFRAME_ORIGINS=* +# FLOWISE_FILE_SIZE_LIMIT=50mb +# SHOW_COMMUNITY_NODES=true +# DISABLE_FLOWISE_TELEMETRY=true +# DISABLED_NODES=bufferMemory,chatOpenAI (comma separated list of node names to disable) +# Uncomment the following line to enable model list config, load the list of models from your local config file +# see https://raw.githubusercontent.com/FlowiseAI/Flowise/main/packages/components/models.json for the format +# 
MODEL_LIST_CONFIG_JSON=/your_model_list_config_file_path + + +############################################################################################################ +############################################ AUTH PARAMETERS ############################################### +############################################################################################################ + +# APP_URL=http://localhost:3000 + +# SMTP_HOST=smtp.host.com +# SMTP_PORT=465 +# SMTP_USER=smtp_user +# SMTP_PASSWORD=smtp_password +# SMTP_SECURE=true +# ALLOW_UNAUTHORIZED_CERTS=false +# SENDER_EMAIL=team@example.com + +JWT_AUTH_TOKEN_SECRET='AABBCCDDAABBCCDDAABBCCDDAABBCCDDAABBCCDD' +JWT_REFRESH_TOKEN_SECRET='AABBCCDDAABBCCDDAABBCCDDAABBCCDDAABBCCDD' +JWT_ISSUER='ISSUER' +JWT_AUDIENCE='AUDIENCE' +JWT_TOKEN_EXPIRY_IN_MINUTES=360 +JWT_REFRESH_TOKEN_EXPIRY_IN_MINUTES=43200 +# EXPIRE_AUTH_TOKENS_ON_RESTART=true # (if you need to expire all tokens on app restart) +# EXPRESS_SESSION_SECRET=flowise +# SECURE_COOKIES= + +# INVITE_TOKEN_EXPIRY_IN_HOURS=24 +# PASSWORD_RESET_TOKEN_EXPIRY_IN_MINS=15 +# PASSWORD_SALT_HASH_ROUNDS=10 +# TOKEN_HASH_SECRET='popcorn' + +# WORKSPACE_INVITE_TEMPLATE_PATH=/path/to/custom/workspace_invite.hbs + + +############################################################################################################ +############################################# ENTERPRISE ################################################### +############################################################################################################ + +# LICENSE_URL= +# FLOWISE_EE_LICENSE_KEY= +# OFFLINE= + + +############################################################################################################ +########################################### METRICS COLLECTION ############################################# +############################################################################################################ + +# 
POSTHOG_PUBLIC_API_KEY=your_posthog_public_api_key + # ENABLE_METRICS=false # METRICS_PROVIDER=prometheus # prometheus | open_telemetry # METRICS_INCLUDE_NODE_METRICS=true # default is true @@ -73,15 +136,21 @@ PORT=3000 # METRICS_OPEN_TELEMETRY_PROTOCOL=http # http | grpc | proto (default is http) # METRICS_OPEN_TELEMETRY_DEBUG=true # default is false -# Uncomment the following lines to enable global agent proxy -# see https://www.npmjs.com/package/global-agent for more details + +############################################################################################################ +############################################### PROXY ###################################################### +############################################################################################################ + +# Uncomment the following lines to enable global agent proxy, see https://www.npmjs.com/package/global-agent for more details # GLOBAL_AGENT_HTTP_PROXY=CorporateHttpProxyUrl # GLOBAL_AGENT_HTTPS_PROXY=CorporateHttpsProxyUrl # GLOBAL_AGENT_NO_PROXY=ExceptionHostsToBypassProxyIfNeeded -###################### -# QUEUE CONFIGURATION -####################### + +############################################################################################################ +########################################### QUEUE CONFIGURATION ############################################ +############################################################################################################ + # MODE=queue #(queue | main) # QUEUE_NAME=flowise-queue # QUEUE_REDIS_EVENT_STREAM_MAX_LEN=100000 @@ -98,4 +167,22 @@ PORT=3000 # REDIS_KEY= # REDIS_CA= # REDIS_KEEP_ALIVE= -# ENABLE_BULLMQ_DASHBOARD= \ No newline at end of file +# ENABLE_BULLMQ_DASHBOARD= + + +############################################################################################################ +############################################## SECURITY #################################################### 
+############################################################################################################ + +# HTTP_DENY_LIST= +# CUSTOM_MCP_SECURITY_CHECK=true +# CUSTOM_MCP_PROTOCOL=sse #(stdio | sse) +# TRUST_PROXY=true #(true | false | 1 | loopback| linklocal | uniquelocal | IP addresses | loopback, IP addresses) + + +############################################################################################################ +########################################### DOCUMENT LOADERS ############################################### +############################################################################################################ + +# PUPPETEER_EXECUTABLE_FILE_PATH='C:\\Program Files\\Google\\Chrome\\Application\\chrome.exe' +# PLAYWRIGHT_EXECUTABLE_FILE_PATH='C:\\Program Files\\Google\\Chrome\\Application\\chrome.exe' diff --git a/packages/server/README-ZH.md b/packages/server/README-ZH.md index f0d7992e0..01f2248c2 100644 --- a/packages/server/README-ZH.md +++ b/packages/server/README-ZH.md @@ -22,15 +22,6 @@ 3. ๆ‰“ๅผ€[http://localhost:3000](http://localhost:3000) -## ๐Ÿ”’ ่บซไปฝ้ชŒ่ฏ - -่ฆๅฏ็”จๅบ”็”จ็บง่บซไปฝ้ชŒ่ฏ๏ผŒ่ฏทๅฐ†`FLOWISE_USERNAME`ๅ’Œ`FLOWISE_PASSWORD`ๆทปๅŠ ๅˆฐ`.env`ๆ–‡ไปถไธญ๏ผš - -``` -FLOWISE_USERNAME=user -FLOWISE_PASSWORD=1234 -``` - ## ๐ŸŒฑ ็Žฏๅขƒๅ˜้‡ Flowise ๆ”ฏๆŒไธๅŒ็š„็Žฏๅขƒๅ˜้‡ๆฅ้…็ฝฎๆ‚จ็š„ๅฎžไพ‹ใ€‚ๆ‚จๅฏไปฅๅœจ`packages/server`ๆ–‡ไปถๅคนไธญ็š„`.env`ๆ–‡ไปถไธญๆŒ‡ๅฎšไปฅไธ‹ๅ˜้‡ใ€‚้˜…่ฏป[ๆ›ดๅคš](https://docs.flowiseai.com/environment-variables) diff --git a/packages/server/README.md b/packages/server/README.md index cc52dc522..c3c6c9a16 100644 --- a/packages/server/README.md +++ b/packages/server/README.md @@ -22,15 +22,6 @@ English | [ไธญๆ–‡](./README-ZH.md) 3. 
Open [http://localhost:3000](http://localhost:3000) -## ๐Ÿ”’ Authentication - -To enable app level authentication, add `FLOWISE_USERNAME` and `FLOWISE_PASSWORD` to the `.env` file: - -``` -FLOWISE_USERNAME=user -FLOWISE_PASSWORD=1234 -``` - ## ๐ŸŒฑ Env Variables Flowise support different environment variables to configure your instance. You can specify the following variables in the `.env` file inside `packages/server` folder. Read [more](https://github.com/FlowiseAI/Flowise/blob/main/CONTRIBUTING.md#-env-variables) @@ -50,7 +41,7 @@ cd Flowise/packages/server pnpm install ./node_modules/.bin/cypress install pnpm build -#Only for writting new tests on local dev -> pnpm run cypress:open +#Only for writing new tests on local dev -> pnpm run cypress:open pnpm run e2e ``` diff --git a/packages/server/cypress/e2e/1-apikey/apikey.cy.js b/packages/server/cypress/e2e/1-apikey/apikey.cy.js index 2ce7a2bbc..06576be88 100644 --- a/packages/server/cypress/e2e/1-apikey/apikey.cy.js +++ b/packages/server/cypress/e2e/1-apikey/apikey.cy.js @@ -1,3 +1,6 @@ +/* +* TODO: Disabling for now as we need to enable login first +* describe('E2E suite for api/v1/apikey API endpoint', () => { beforeEach(() => { cy.visit('http://localhost:3000/apikey') @@ -43,3 +46,4 @@ describe('E2E suite for api/v1/apikey API endpoint', () => { cy.get('table.MuiTable-root tbody tr').should('have.length', 1) }) }) +*/ diff --git a/packages/server/cypress/e2e/2-variables/variables.cy.js b/packages/server/cypress/e2e/2-variables/variables.cy.js index bcb1ec5c4..f2b8c3401 100644 --- a/packages/server/cypress/e2e/2-variables/variables.cy.js +++ b/packages/server/cypress/e2e/2-variables/variables.cy.js @@ -1,3 +1,6 @@ +/* +* TODO: Disabling for now as we need to enable login first +* describe('E2E suite for api/v1/variables API endpoint', () => { beforeEach(() => { cy.visit('http://localhost:3000/variables') @@ -9,7 +12,7 @@ describe('E2E suite for api/v1/variables API endpoint', () => { }) // CREATE - it('can add 
new variable', () => { + it.skip('can add new variable', () => { const newVariableName = 'MafiVariable' const newVariableValue = 'shh!!! secret value' cy.get('#btn_createVariable').click() @@ -21,14 +24,14 @@ describe('E2E suite for api/v1/variables API endpoint', () => { }) // READ - it('can retrieve all api keys', () => { + it.skip('can retrieve all api keys', () => { const newVariableName = 'MafiVariable' cy.get('.MuiTable-root tbody tr').should('have.length', 1) cy.get('.MuiTable-root tbody tr').last().find('th').first().find('div').first().should('have.text', newVariableName) }) // UPDATE - it('can update new api key', () => { + it.skip('can update new api key', () => { const updatedVariableName = 'PichiVariable' const updatedVariableValue = 'silence shh! value' cy.get('.MuiTable-root tbody tr').last().find('td').eq(4).find('button').click() @@ -40,10 +43,11 @@ describe('E2E suite for api/v1/variables API endpoint', () => { }) // DELETE - it('can delete new api key', () => { + it.skip('can delete new api key', () => { cy.get('.MuiTable-root tbody tr').last().find('td').eq(5).find('button').click() cy.get('.MuiDialog-scrollPaper .MuiDialogActions-spacing button').last().click() cy.get('.MuiTable-root tbody tr').should('have.length', 0) cy.get('.MuiCardContent-root .MuiStack-root').last().find('div').last().should('have.text', 'No Variables Yet') }) }) +*/ diff --git a/packages/server/gulpfile.ts b/packages/server/gulpfile.ts new file mode 100644 index 000000000..8de915dda --- /dev/null +++ b/packages/server/gulpfile.ts @@ -0,0 +1,7 @@ +import { dest, src } from 'gulp' + +function copyEmailTemplates() { + return src(['src/enterprise/emails/*.hbs']).pipe(dest('dist/enterprise/emails')) +} + +exports.default = copyEmailTemplates diff --git a/packages/server/jest.config.js b/packages/server/jest.config.js new file mode 100644 index 000000000..17a5e4ce1 --- /dev/null +++ b/packages/server/jest.config.js @@ -0,0 +1,23 @@ +module.exports = { + // Use ts-jest preset 
for testing TypeScript files with Jest + preset: 'ts-jest', + // Set the test environment to Node.js + testEnvironment: 'node', + + // Define the root directory for tests and modules + roots: ['/test'], + + // Use ts-jest to transform TypeScript files + transform: { + '^.+\\.tsx?$': 'ts-jest' + }, + + // Regular expression to find test files + testRegex: '((\\.|/)index\\.test)\\.tsx?$', + + // File extensions to recognize in module resolution + moduleFileExtensions: ['ts', 'tsx', 'js', 'jsx', 'json', 'node'], + + // Display individual test results with the test suite hierarchy. + verbose: true +} diff --git a/packages/server/marketplaces/agentflowsv2/Agentic RAG V2.json b/packages/server/marketplaces/agentflowsv2/Agentic RAG.json similarity index 99% rename from packages/server/marketplaces/agentflowsv2/Agentic RAG V2.json rename to packages/server/marketplaces/agentflowsv2/Agentic RAG.json index 343307f12..41633eb52 100644 --- a/packages/server/marketplaces/agentflowsv2/Agentic RAG V2.json +++ b/packages/server/marketplaces/agentflowsv2/Agentic RAG.json @@ -1,6 +1,6 @@ { "description": "An agent based approach using AgentflowV2 to perform self-correcting question answering over documents", - "usecases": ["Reflective Agent"], + "usecases": ["Reflective Agent", "Documents QnA"], "nodes": [ { "id": "startAgentflow_0", @@ -12,7 +12,7 @@ "data": { "id": "startAgentflow_0", "label": "Start", - "version": 1, + "version": 1.1, "name": "startAgentflow", "type": "Start", "color": "#7EE787", @@ -157,6 +157,15 @@ ], "id": "startAgentflow_0-input-startState-array", "display": true + }, + { + "label": "Persist State", + "name": "startPersistState", + "type": "boolean", + "description": "Persist the state in the same session", + "optional": true, + "id": "startAgentflow_0-input-startPersistState-boolean", + "display": true } ], "inputAnchors": [], @@ -294,7 +303,7 @@ "baseOptions": "", "allowImageUploads": "", "imageResolution": "low", - "reasoningEffort": "medium", + 
"reasoningEffort": "", "conditionAgentModel": "chatOpenAI" } }, @@ -633,7 +642,7 @@ "baseOptions": "", "allowImageUploads": "", "imageResolution": "low", - "reasoningEffort": "medium", + "reasoningEffort": "", "llmModel": "chatOpenAI" } }, @@ -955,7 +964,7 @@ "baseOptions": "", "allowImageUploads": "", "imageResolution": "low", - "reasoningEffort": "medium", + "reasoningEffort": "", "llmModel": "chatOpenAI" } }, @@ -1201,7 +1210,7 @@ "baseOptions": "", "allowImageUploads": "", "imageResolution": "low", - "reasoningEffort": "medium", + "reasoningEffort": "", "conditionAgentModel": "chatOpenAI" } }, @@ -1530,7 +1539,7 @@ "baseOptions": "", "allowImageUploads": "", "imageResolution": "low", - "reasoningEffort": "medium", + "reasoningEffort": "", "llmModel": "chatOpenAI" } }, @@ -1862,7 +1871,7 @@ "baseOptions": "", "allowImageUploads": "", "imageResolution": "low", - "reasoningEffort": "medium", + "reasoningEffort": "", "llmModel": "chatOpenAI" } }, diff --git a/packages/server/marketplaces/agentflowsv2/Agents Handoff.json b/packages/server/marketplaces/agentflowsv2/Agents Handoff.json index 72b4da969..f8ae16bfc 100644 --- a/packages/server/marketplaces/agentflowsv2/Agents Handoff.json +++ b/packages/server/marketplaces/agentflowsv2/Agents Handoff.json @@ -12,7 +12,7 @@ "data": { "id": "startAgentflow_0", "label": "Start", - "version": 1, + "version": 1.1, "name": "startAgentflow", "type": "Start", "color": "#7EE787", @@ -157,6 +157,15 @@ ], "id": "startAgentflow_0-input-startState-array", "display": true + }, + { + "label": "Persist State", + "name": "startPersistState", + "type": "boolean", + "description": "Persist the state in the same session", + "optional": true, + "id": "startAgentflow_0-input-startPersistState-boolean", + "display": true } ], "inputAnchors": [], @@ -291,7 +300,7 @@ "proxyUrl": "", "baseOptions": "", "allowImageUploads": true, - "reasoningEffort": "medium", + "reasoningEffort": "", "conditionAgentModel": "chatOpenAI" } }, diff --git 
a/packages/server/marketplaces/agentflowsv2/Deep Research V2.json b/packages/server/marketplaces/agentflowsv2/Deep Research With Multi-turn Conversations.json similarity index 97% rename from packages/server/marketplaces/agentflowsv2/Deep Research V2.json rename to packages/server/marketplaces/agentflowsv2/Deep Research With Multi-turn Conversations.json index bd58656b8..031713a28 100644 --- a/packages/server/marketplaces/agentflowsv2/Deep Research V2.json +++ b/packages/server/marketplaces/agentflowsv2/Deep Research With Multi-turn Conversations.json @@ -1,18 +1,18 @@ { - "description": "An agent capable of performing research, synthesizing information, and generating in-depth, well-structured white papers on any given topic", - "usecases": ["Agent"], + "description": "Deep research system that conducts multi-turn agent conversations to perform web search, synthesize insights and generate well-structured white papers", + "usecases": ["Deep Research"], "nodes": [ { "id": "startAgentflow_0", "type": "agentFlow", "position": { - "x": -275.0799323960054, - "y": 31.301887150099603 + "x": -397.64170181617976, + "y": 87.52288229696859 }, "data": { "id": "startAgentflow_0", "label": "Start", - "version": 1, + "version": 1.1, "name": "startAgentflow", "type": "Start", "color": "#7EE787", @@ -160,6 +160,15 @@ ], "id": "startAgentflow_0-input-startState-array", "display": true + }, + { + "label": "Persist State", + "name": "startPersistState", + "type": "boolean", + "description": "Persist the state in the same session", + "optional": true, + "id": "startAgentflow_0-input-startPersistState-boolean", + "display": true } ], "inputAnchors": [], @@ -179,19 +188,19 @@ "selected": false }, "width": 103, - "height": 65, + "height": 66, "selected": false, "positionAbsolute": { - "x": -275.0799323960054, - "y": 31.301887150099603 + "x": -397.64170181617976, + "y": 87.52288229696859 }, "dragging": false }, { "id": "llmAgentflow_0", "position": { - "x": -59.13383952997965, - "y": 
28.495983624910906 + "x": -242.41428370877253, + "y": 85.84139867471725 }, "data": { "id": "llmAgentflow_0", @@ -488,7 +497,7 @@ "baseOptions": "", "allowImageUploads": "", "imageResolution": "low", - "reasoningEffort": "medium", + "reasoningEffort": "", "llmModel": "chatOpenAI" } }, @@ -504,19 +513,19 @@ }, "type": "agentFlow", "width": 175, - "height": 71, + "height": 72, "selected": false, "positionAbsolute": { - "x": -59.13383952997965, - "y": 28.495983624910906 + "x": -242.41428370877253, + "y": 85.84139867471725 }, "dragging": false }, { "id": "agentAgentflow_0", "position": { - "x": 209.99147630894493, - "y": 100.7933285478893 + "x": -26.136703307904796, + "y": 72.89650466398558 }, "data": { "id": "agentAgentflow_0", @@ -815,13 +824,6 @@ } ], "agentTools": [ - { - "agentSelectedTool": "braveSearchAPI", - "agentSelectedToolRequiresHumanInput": "", - "agentSelectedToolConfig": { - "agentSelectedTool": "braveSearchAPI" - } - }, { "agentSelectedTool": "webScraperTool", "agentSelectedToolRequiresHumanInput": "", @@ -833,6 +835,13 @@ "description": "", "agentSelectedTool": "webScraperTool" } + }, + { + "agentSelectedTool": "braveSearchAPI", + "agentSelectedToolRequiresHumanInput": "", + "agentSelectedToolConfig": { + "agentSelectedTool": "braveSearchAPI" + } } ], "agentKnowledgeDocumentStores": "", @@ -870,20 +879,20 @@ "selected": false }, "type": "agentFlow", - "width": 199, - "height": 103, + "width": 200, + "height": 100, "selected": false, "positionAbsolute": { - "x": 209.99147630894493, - "y": 100.7933285478893 + "x": -26.136703307904796, + "y": 72.89650466398558 }, "dragging": false }, { "id": "agentAgentflow_1", "position": { - "x": 203.50865583557328, - "y": -75.13070214403373 + "x": 210.25517525319754, + "y": 73.29272504370039 }, "data": { "id": "agentAgentflow_1", @@ -1182,13 +1191,6 @@ } ], "agentTools": [ - { - "agentSelectedTool": "braveSearchAPI", - "agentSelectedToolRequiresHumanInput": "", - "agentSelectedToolConfig": { - "agentSelectedTool": 
"braveSearchAPI" - } - }, { "agentSelectedTool": "webScraperTool", "agentSelectedToolRequiresHumanInput": "", @@ -1200,6 +1202,13 @@ "description": "", "agentSelectedTool": "webScraperTool" } + }, + { + "agentSelectedTool": "braveSearchAPI", + "agentSelectedToolRequiresHumanInput": "", + "agentSelectedToolConfig": { + "agentSelectedTool": "braveSearchAPI" + } } ], "agentKnowledgeDocumentStores": "", @@ -1237,24 +1246,24 @@ "selected": false }, "type": "agentFlow", - "width": 199, - "height": 103, + "width": 200, + "height": 100, "selected": false, "positionAbsolute": { - "x": 203.50865583557328, - "y": -75.13070214403373 + "x": 210.25517525319754, + "y": 73.29272504370039 }, "dragging": false }, { "id": "conditionAgentflow_0", "position": { - "x": 497.07879661792845, - "y": 29.068421396935392 + "x": 457.0277025649177, + "y": 83.6060813840138 }, "data": { "id": "conditionAgentflow_0", - "label": "Condition", + "label": "Check Iterations", "version": 1, "name": "conditionAgentflow", "type": "Condition", @@ -1511,24 +1520,24 @@ "selected": false }, "type": "agentFlow", - "width": 134, + "width": 178, "height": 80, "selected": false, "positionAbsolute": { - "x": 497.07879661792845, - "y": 29.068421396935392 + "x": 457.0277025649177, + "y": 83.6060813840138 }, "dragging": false }, { "id": "loopAgentflow_0", "position": { - "x": 710.6354115635097, - "y": -61.015932400168076 + "x": 690.1837890683553, + "y": 22.494859455045713 }, "data": { "id": "loopAgentflow_0", - "label": "Loop", + "label": "Loop Back to Agent 0", "version": 1, "name": "loopAgentflow", "type": "Loop", @@ -1566,13 +1575,13 @@ "selected": false }, "type": "agentFlow", - "width": 104, - "height": 65, + "width": 211, + "height": 66, "selected": false, "dragging": false, "positionAbsolute": { - "x": 710.6354115635097, - "y": -61.015932400168076 + "x": 690.1837890683553, + "y": 22.494859455045713 } }, { @@ -1891,8 +1900,8 @@ "selected": false }, "type": "agentFlow", - "width": 199, - "height": 71, + "width": 
200, + "height": 72, "selected": false, "positionAbsolute": { "x": 693.0529196789191, @@ -1903,8 +1912,8 @@ { "id": "stickyNoteAgentflow_0", "position": { - "x": -320.62033146052283, - "y": -110.15285265313359 + "x": -445.43094068657194, + "y": -61.80279682682627 }, "data": { "id": "stickyNoteAgentflow_0", @@ -1943,20 +1952,20 @@ "selected": false }, "type": "stickyNote", - "width": 203, - "height": 122, + "width": 210, + "height": 123, "selected": false, "positionAbsolute": { - "x": -320.62033146052283, - "y": -110.15285265313359 + "x": -445.43094068657194, + "y": -61.80279682682627 }, "dragging": false }, { "id": "stickyNoteAgentflow_1", "position": { - "x": 466.8306744858025, - "y": -189.9009582021492 + "x": 454.90056136362915, + "y": -146.44126039994615 }, "data": { "id": "stickyNoteAgentflow_1", @@ -1995,12 +2004,12 @@ "selected": false }, "type": "stickyNote", - "width": 203, - "height": 202, + "width": 210, + "height": 203, "selected": false, "positionAbsolute": { - "x": 466.8306744858025, - "y": -189.9009582021492 + "x": 454.90056136362915, + "y": -146.44126039994615 }, "dragging": false }, @@ -2047,8 +2056,8 @@ "selected": false }, "type": "stickyNote", - "width": 203, - "height": 283, + "width": 210, + "height": 263, "selected": false, "positionAbsolute": { "x": 693.7511120802441, diff --git a/packages/server/marketplaces/agentflowsv2/Deep Research With Subagents.json b/packages/server/marketplaces/agentflowsv2/Deep Research With Subagents.json new file mode 100644 index 000000000..a8b32dad7 --- /dev/null +++ b/packages/server/marketplaces/agentflowsv2/Deep Research With Subagents.json @@ -0,0 +1,1831 @@ +{ + "description": "Multi-agent system that breaks down complex queries, assigns tasks to subagents, and synthesizes findings into detailed reports.", + "usecases": ["Deep Research"], + "nodes": [ + { + "id": "startAgentflow_0", + "type": "agentFlow", + "position": { + "x": -241.58365178492127, + "y": 86.32546838777353 + }, + "data": { + "id": 
"startAgentflow_0", + "label": "Start", + "version": 1.1, + "name": "startAgentflow", + "type": "Start", + "color": "#7EE787", + "hideInput": true, + "baseClasses": ["Start"], + "category": "Agent Flows", + "description": "Starting point of the agentflow", + "inputParams": [ + { + "label": "Input Type", + "name": "startInputType", + "type": "options", + "options": [ + { + "label": "Chat Input", + "name": "chatInput", + "description": "Start the conversation with chat input" + }, + { + "label": "Form Input", + "name": "formInput", + "description": "Start the workflow with form inputs" + } + ], + "default": "chatInput", + "id": "startAgentflow_0-input-startInputType-options", + "display": true + }, + { + "label": "Form Title", + "name": "formTitle", + "type": "string", + "placeholder": "Please Fill Out The Form", + "show": { + "startInputType": "formInput" + }, + "id": "startAgentflow_0-input-formTitle-string", + "display": true + }, + { + "label": "Form Description", + "name": "formDescription", + "type": "string", + "placeholder": "Complete all fields below to continue", + "show": { + "startInputType": "formInput" + }, + "id": "startAgentflow_0-input-formDescription-string", + "display": true + }, + { + "label": "Form Input Types", + "name": "formInputTypes", + "description": "Specify the type of form input", + "type": "array", + "show": { + "startInputType": "formInput" + }, + "array": [ + { + "label": "Type", + "name": "type", + "type": "options", + "options": [ + { + "label": "String", + "name": "string" + }, + { + "label": "Number", + "name": "number" + }, + { + "label": "Boolean", + "name": "boolean" + }, + { + "label": "Options", + "name": "options" + } + ], + "default": "string" + }, + { + "label": "Label", + "name": "label", + "type": "string", + "placeholder": "Label for the input" + }, + { + "label": "Variable Name", + "name": "name", + "type": "string", + "placeholder": "Variable name for the input (must be camel case)", + "description": "Variable name 
must be camel case. For example: firstName, lastName, etc." + }, + { + "label": "Add Options", + "name": "addOptions", + "type": "array", + "show": { + "formInputTypes[$index].type": "options" + }, + "array": [ + { + "label": "Option", + "name": "option", + "type": "string" + } + ] + } + ], + "id": "startAgentflow_0-input-formInputTypes-array", + "display": true + }, + { + "label": "Ephemeral Memory", + "name": "startEphemeralMemory", + "type": "boolean", + "description": "Start fresh for every execution without past chat history", + "optional": true + }, + { + "label": "Flow State", + "name": "startState", + "description": "Runtime state during the execution of the workflow", + "type": "array", + "optional": true, + "array": [ + { + "label": "Key", + "name": "key", + "type": "string", + "placeholder": "Foo" + }, + { + "label": "Value", + "name": "value", + "type": "string", + "placeholder": "Bar" + } + ], + "id": "startAgentflow_0-input-startState-array", + "display": true + }, + { + "label": "Persist State", + "name": "startPersistState", + "type": "boolean", + "description": "Persist the state in the same session", + "optional": true, + "id": "startAgentflow_0-input-startPersistState-boolean", + "display": true + } + ], + "inputAnchors": [], + "inputs": { + "startInputType": "formInput", + "formTitle": "Research", + "formDescription": "A research agent that takes in a query, and return a detailed report", + "formInputTypes": [ + { + "type": "string", + "label": "Query", + "name": "query", + "addOptions": "" + } + ], + "startState": [ + { + "key": "subagents", + "value": "" + }, + { + "key": "findings", + "value": "" + } + ] + }, + "outputAnchors": [ + { + "id": "startAgentflow_0-output-startAgentflow", + "label": "Start", + "name": "startAgentflow" + } + ], + "outputs": {}, + "selected": false + }, + "width": 103, + "height": 66, + "selected": false, + "positionAbsolute": { + "x": -241.58365178492127, + "y": 86.32546838777353 + }, + "dragging": false + }, + { + 
"id": "llmAgentflow_0", + "position": { + "x": -111.52635639216058, + "y": 83.67035986437665 + }, + "data": { + "id": "llmAgentflow_0", + "label": "Planner", + "version": 1, + "name": "llmAgentflow", + "type": "LLM", + "color": "#64B5F6", + "baseClasses": ["LLM"], + "category": "Agent Flows", + "description": "Large language models to analyze user-provided inputs and generate responses", + "inputParams": [ + { + "label": "Model", + "name": "llmModel", + "type": "asyncOptions", + "loadMethod": "listModels", + "loadConfig": true, + "id": "llmAgentflow_0-input-llmModel-asyncOptions", + "display": true + }, + { + "label": "Messages", + "name": "llmMessages", + "type": "array", + "optional": true, + "acceptVariable": true, + "array": [ + { + "label": "Role", + "name": "role", + "type": "options", + "options": [ + { + "label": "System", + "name": "system" + }, + { + "label": "Assistant", + "name": "assistant" + }, + { + "label": "Developer", + "name": "developer" + }, + { + "label": "User", + "name": "user" + } + ] + }, + { + "label": "Content", + "name": "content", + "type": "string", + "acceptVariable": true, + "generateInstruction": true, + "rows": 4 + } + ], + "id": "llmAgentflow_0-input-llmMessages-array", + "display": true + }, + { + "label": "Enable Memory", + "name": "llmEnableMemory", + "type": "boolean", + "description": "Enable memory for the conversation thread", + "default": true, + "optional": true, + "id": "llmAgentflow_0-input-llmEnableMemory-boolean", + "display": true + }, + { + "label": "Memory Type", + "name": "llmMemoryType", + "type": "options", + "options": [ + { + "label": "All Messages", + "name": "allMessages", + "description": "Retrieve all messages from the conversation" + }, + { + "label": "Window Size", + "name": "windowSize", + "description": "Uses a fixed window size to surface the last N messages" + }, + { + "label": "Conversation Summary", + "name": "conversationSummary", + "description": "Summarizes the whole conversation" + }, + { + 
"label": "Conversation Summary Buffer", + "name": "conversationSummaryBuffer", + "description": "Summarize conversations once token limit is reached. Default to 2000" + } + ], + "optional": true, + "default": "allMessages", + "show": { + "llmEnableMemory": true + }, + "id": "llmAgentflow_0-input-llmMemoryType-options", + "display": true + }, + { + "label": "Window Size", + "name": "llmMemoryWindowSize", + "type": "number", + "default": "20", + "description": "Uses a fixed window size to surface the last N messages", + "show": { + "llmMemoryType": "windowSize" + }, + "id": "llmAgentflow_0-input-llmMemoryWindowSize-number", + "display": false + }, + { + "label": "Max Token Limit", + "name": "llmMemoryMaxTokenLimit", + "type": "number", + "default": "2000", + "description": "Summarize conversations once token limit is reached. Default to 2000", + "show": { + "llmMemoryType": "conversationSummaryBuffer" + }, + "id": "llmAgentflow_0-input-llmMemoryMaxTokenLimit-number", + "display": false + }, + { + "label": "Input Message", + "name": "llmUserMessage", + "type": "string", + "description": "Add an input message as user message at the end of the conversation", + "rows": 4, + "optional": true, + "acceptVariable": true, + "show": { + "llmEnableMemory": true + }, + "id": "llmAgentflow_0-input-llmUserMessage-string", + "display": true + }, + { + "label": "Return Response As", + "name": "llmReturnResponseAs", + "type": "options", + "options": [ + { + "label": "User Message", + "name": "userMessage" + }, + { + "label": "Assistant Message", + "name": "assistantMessage" + } + ], + "default": "userMessage", + "id": "llmAgentflow_0-input-llmReturnResponseAs-options", + "display": true + }, + { + "label": "JSON Structured Output", + "name": "llmStructuredOutput", + "description": "Instruct the LLM to give output in a JSON structured schema", + "type": "array", + "optional": true, + "acceptVariable": true, + "array": [ + { + "label": "Key", + "name": "key", + "type": "string" + }, + 
{ + "label": "Type", + "name": "type", + "type": "options", + "options": [ + { + "label": "String", + "name": "string" + }, + { + "label": "String Array", + "name": "stringArray" + }, + { + "label": "Number", + "name": "number" + }, + { + "label": "Boolean", + "name": "boolean" + }, + { + "label": "Enum", + "name": "enum" + }, + { + "label": "JSON Array", + "name": "jsonArray" + } + ] + }, + { + "label": "Enum Values", + "name": "enumValues", + "type": "string", + "placeholder": "value1, value2, value3", + "description": "Enum values. Separated by comma", + "optional": true, + "show": { + "llmStructuredOutput[$index].type": "enum" + } + }, + { + "label": "JSON Schema", + "name": "jsonSchema", + "type": "code", + "placeholder": "{\n \"answer\": {\n \"type\": \"string\",\n \"description\": \"Value of the answer\"\n },\n \"reason\": {\n \"type\": \"string\",\n \"description\": \"Reason for the answer\"\n },\n \"optional\": {\n \"type\": \"boolean\"\n },\n \"count\": {\n \"type\": \"number\"\n },\n \"children\": {\n \"type\": \"array\",\n \"items\": {\n \"type\": \"object\",\n \"properties\": {\n \"value\": {\n \"type\": \"string\",\n \"description\": \"Value of the children's answer\"\n }\n }\n }\n }\n}", + "description": "JSON schema for the structured output", + "optional": true, + "show": { + "llmStructuredOutput[$index].type": "jsonArray" + } + }, + { + "label": "Description", + "name": "description", + "type": "string", + "placeholder": "Description of the key" + } + ], + "id": "llmAgentflow_0-input-llmStructuredOutput-array", + "display": true + }, + { + "label": "Update Flow State", + "name": "llmUpdateState", + "description": "Update runtime state during the execution of the workflow", + "type": "array", + "optional": true, + "acceptVariable": true, + "array": [ + { + "label": "Key", + "name": "key", + "type": "asyncOptions", + "loadMethod": "listRuntimeStateKeys", + "freeSolo": true + }, + { + "label": "Value", + "name": "value", + "type": "string", + 
"acceptVariable": true, + "acceptNodeOutputAsVariable": true + } + ], + "id": "llmAgentflow_0-input-llmUpdateState-array", + "display": true + } + ], + "inputAnchors": [], + "inputs": { + "llmModel": "chatAnthropic", + "llmMessages": [ + { + "role": "system", + "content": "

You are an expert research lead, focused on high-level research strategy, planning, efficient delegation to subagents, and final report writing. Your core goal is to be maximally helpful to the user by leading a process to research the user's query and then creating an excellent research report that answers this query very well. Take the current request from the user, plan out an effective research process to answer it as well as possible, and then execute this plan by delegating key tasks to appropriate subagents.

The current date is {{ current_date_time }}.

<research_process>

Follow this process to break down the user's question and develop an excellent research plan. Think about the user's task thoroughly and in great detail to understand it well and determine what to do next. Analyze each aspect of the user's question and identify the most important aspects. Consider multiple approaches with complete, thorough reasoning. Explore several different methods of answering the question (at least 3) and then choose the best method you find. Follow this process closely:

1. Assessment and breakdown: Analyze and break down the user's prompt to make sure you fully understand it.

* Identify the main concepts, key entities, and relationships in the task.

* List specific facts or data points needed to answer the question well.

* Note any temporal or contextual constraints on the question.

* Analyze what features of the prompt are most important - what does the user likely care about most here? What are they expecting or desiring in the final result? What tools do they expect to be used and how do we know?

* Determine what form the answer would need to be in to fully accomplish the user's task. Would it need to be a detailed report, a list of entities, an analysis of different perspectives, a visual report, or something else? What components will it need to have?

2. Query type determination: Explicitly state your reasoning on what type of query this question is from the categories below.

* Depth-first query: When the problem requires multiple perspectives on the same issue, and calls for \"going deep\" by analyzing a single topic from many angles.

- Benefits from parallel agents exploring different viewpoints, methodologies, or sources

- The core question remains singular but benefits from diverse approaches

- Example: \"What are the most effective treatments for depression?\" (benefits from parallel agents exploring different treatments and approaches to this question)

- Example: \"What really caused the 2008 financial crisis?\" (benefits from economic, regulatory, behavioral, and historical perspectives, and analyzing or steelmanning different viewpoints on the question)

- Example: \"can you identify the best approach to building AI finance agents in 2025 and why?\"

* Breadth-first query: When the problem can be broken into distinct, independent sub-questions, and calls for \"going wide\" by gathering information about each sub-question.

- Benefits from parallel agents each handling separate sub-topics.

- The query naturally divides into multiple parallel research streams or distinct, independently researchable sub-topics

- Example: \"Compare the economic systems of three Nordic countries\" (benefits from simultaneous independent research on each country)

- Example: \"What are the net worths and names of all the CEOs of all the fortune 500 companies?\" (intractable to research in a single thread; most efficient to split up into many distinct research agents which each gathers some of the necessary information)

- Example: \"Compare all the major frontend frameworks based on performance, learning curve, ecosystem, and industry adoption\" (best to identify all the frontend frameworks and then research all of these factors for each framework)

* Straightforward query: When the problem is focused, well-defined, and can be effectively answered by a single focused investigation or fetching a single resource from the internet.

- Can be handled effectively by a single subagent with clear instructions; does not benefit much from extensive research

- Example: \"What is the current population of Tokyo?\" (simple fact-finding)

- Example: \"What are all the fortune 500 companies?\" (just requires finding a single website with a full list, fetching that list, and then returning the results)

- Example: \"Tell me about bananas\" (fairly basic, short question that likely does not expect an extensive answer)

3. Detailed research plan development: Based on the query type, develop a specific research plan with clear allocation of tasks across different research subagents. Ensure if this plan is executed, it would result in an excellent answer to the user's query.

* For Depth-first queries:

- Define 3-5 different methodological approaches or perspectives.

- List specific expert viewpoints or sources of evidence that would enrich the analysis.

- Plan how each perspective will contribute unique insights to the central question.

- Specify how findings from different approaches will be synthesized.

- Example: For \"What causes obesity?\", plan agents to investigate genetic factors, environmental influences, psychological aspects, socioeconomic patterns, and biomedical evidence, and outline how the information could be aggregated into a great answer.

* For Breadth-first queries:

- Enumerate all the distinct sub-questions or sub-tasks that can be researched independently to answer the query.

- Identify the most critical sub-questions or perspectives needed to answer the query comprehensively. Only create additional subagents if the query has clearly distinct components that cannot be efficiently handled by fewer agents. Avoid creating subagents for every possible angle - focus on the essential ones.

- Prioritize these sub-tasks based on their importance and expected research complexity.

- Define extremely clear, crisp, and understandable boundaries between sub-topics to prevent overlap.

- Plan how findings will be aggregated into a coherent whole.

- Example: For \"Compare EU country tax systems\", first create a subagent to retrieve a list of all the countries in the EU today, then think about what metrics and factors would be relevant to compare each country's tax systems, then use the batch tool to run 4 subagents to research the metrics and factors for the key countries in Northern Europe, Western Europe, Eastern Europe, Southern Europe.

* For Straightforward queries:

- Identify the most direct, efficient path to the answer.

- Determine whether basic fact-finding or minor analysis is needed.

- Specify exact data points or information required to answer.

- Determine what sources are likely most relevant to answer this query that the subagents should use, and whether multiple sources are needed for fact-checking.

- Plan basic verification methods to ensure the accuracy of the answer.

- Create an extremely clear task description that describes how a subagent should research this question.

* For each element in your plan for answering any query, explicitly evaluate:

- Can this step be broken into independent subtasks for a more efficient process?

- Would multiple perspectives benefit this step?

- What specific output is expected from this step?

- Is this step strictly necessary to answer the user's query well?

4. Methodical plan execution: Execute the plan fully, using parallel subagents where possible. Determine how many subagents to use based on the complexity of the query, default to using 3 subagents for most queries.

* For parallelizable steps:

- Deploy appropriate subagents using the <delegation_instructions> below, making sure to provide extremely clear task descriptions to each subagent and ensuring that if these tasks are accomplished it would provide the information needed to answer the query.

- Synthesize findings when the subtasks are complete.

* For non-parallelizable/critical steps:

- First, attempt to accomplish them yourself based on your existing knowledge and reasoning. If the steps require additional research or up-to-date information from the web, deploy a subagent.

- If steps are very challenging, deploy independent subagents for additional perspectives or approaches.

- Compare the subagent's results and synthesize them using an ensemble approach and by applying critical reasoning.

* Throughout execution:

- Continuously monitor progress toward answering the user's query.

- Update the search plan and your subagent delegation strategy based on findings from tasks.

- Adapt to new information well - analyze the results, use Bayesian reasoning to update your priors, and then think carefully about what to do next.

- Adjust research depth based on time constraints and efficiency - if you are running out of time or a research process has already taken a very long time, avoid deploying further subagents and instead just start composing the output report immediately.

</research_process>

<subagent_count_guidelines>

When determining how many subagents to create, follow these guidelines:

1. Simple/Straightforward queries: create 1 subagent to collaborate with you directly

- Example: \"What is the tax deadline this year?\" or \"Research bananas\" → 1 subagent

- Even for simple queries, always create at least 1 subagent to ensure proper source gathering

2. Standard complexity queries: 2-3 subagents

- For queries requiring multiple perspectives or research approaches

- Example: \"Compare the top 3 cloud providers\" → 3 subagents (one per provider)

3. Medium complexity queries: 3-5 subagents

- For multi-faceted questions requiring different methodological approaches

- Example: \"Analyze the impact of AI on healthcare\" → 4 subagents (regulatory, clinical, economic, technological aspects)

4. High complexity queries: 5-10 subagents (maximum 20)

- For very broad, multi-part queries with many distinct components

- Identify the most effective algorithms to efficiently answer these high-complexity queries with around 20 subagents.

- Example: \"Fortune 500 CEOs birthplaces and ages\" → Divide the large info-gathering task into smaller segments (e.g., 10 subagents handling 50 CEOs each)

IMPORTANT: Never create more than 20 subagents unless strictly necessary. If a task seems to require more than 20 subagents, it typically means you should restructure your approach to consolidate similar sub-tasks and be more efficient in your research process. Prefer fewer, more capable subagents over many overly narrow ones. More subagents = more overhead. Only add subagents when they provide distinct value.

</subagent_count_guidelines>

<delegation_instructions>

Use subagents as your primary research team - they should perform all major research tasks:

1. Deployment strategy:

* Deploy subagents immediately after finalizing your research plan, so you can start the research process quickly.

* Create research subagent with very clear and specific instructions to describe the subagent's task.

* Each subagent is a fully capable researcher that can search the web and use the other search tools that are available.

* Consider priority and dependency when ordering subagent tasks - deploy the most important subagents first. For instance, when other tasks will depend on results from one specific task, always create a subagent to address that blocking task first.

* Ensure you have sufficient coverage for comprehensive research - ensure that you deploy subagents to complete every task.

* All substantial information gathering should be delegated to subagents.

* While waiting for a subagent to complete, use your time efficiently by analyzing previous results, updating your research plan, or reasoning about the user's query and how to answer it best.

2. Task allocation principles:

* For depth-first queries: Deploy subagents in sequence to explore different methodologies or perspectives on the same core question. Start with the approach most likely to yield comprehensive and good results, then follow with alternative viewpoints to fill gaps or provide contrasting analysis.

* For breadth-first queries: Order subagents by topic importance and research complexity. Begin with subagents that will establish key facts or framework information, then deploy subsequent subagents to explore more specific or dependent subtopics.

* For straightforward queries: Deploy a single comprehensive subagent with clear instructions for fact-finding and verification. For these simple queries, treat the subagent as an equal collaborator - you can conduct some research yourself while delegating specific research tasks to the subagent. Give this subagent very clear instructions and try to ensure the subagent handles about half of the work, to efficiently distribute research work between yourself and the subagent.

* Avoid deploying subagents for trivial tasks that you can complete yourself, such as simple calculations, basic formatting, small web searches, or tasks that don't require external research

* But always deploy at least 1 subagent, even for simple tasks.

* Avoid overlap between subagents - every subagent should have distinct, clearly separate tasks, to avoid replicating work unnecessarily and wasting resources.

3. Clear direction for subagents: Ensure that you provide every subagent with extremely detailed, specific, and clear instructions for what their task is and how to accomplish it.

* All instructions for subagents should include the following as appropriate:

- Specific research objectives, ideally just 1 core objective per subagent.

- Expected output format - e.g. a list of entities, a report of the facts, an answer to a specific question, or other.

- Relevant background context about the user's question and how the subagent should contribute to the research plan.

- Key questions to answer as part of the research.

- Suggested starting points and sources to use; define what constitutes reliable information or high-quality sources for this task, and list any unreliable sources to avoid.

- Specific tools that the subagent should use - i.e. using web search and web fetch for gathering information from the web, or if the query requires non-public, company-specific, or user-specific information, use the available internal tools like google drive, gmail, gcal, slack, or any other internal tools that are available currently.

- If needed, precise scope boundaries to prevent research drift.

* Make sure that IF all the subagents followed their instructions very well, the results in aggregate would allow you to give an EXCELLENT answer to the user's question - complete, thorough, detailed, and accurate.

* When giving instructions to subagents, also think about what sources might be high-quality for their tasks, and give them some guidelines on what sources to use and how they should evaluate source quality for each task.

* Example of a good, clear, detailed task description for a subagent: \"Research the semiconductor supply chain crisis and its current status as of 2025. Use the web_search and web_fetch tools to gather facts from the internet. Begin by examining recent quarterly reports from major chip manufacturers like TSMC, Samsung, and Intel, which can be found on their investor relations pages or through the SEC EDGAR database. Search for industry reports from SEMI, Gartner, and IDC that provide market analysis and forecasts. Investigate government responses by checking the US CHIPS Act implementation progress at commerce.gov, EU Chips Act at ec.europa.eu, and similar initiatives in Japan, South Korea, and Taiwan through their respective government portals. Prioritize original sources over news aggregators. Focus on identifying current bottlenecks, projected capacity increases from new fab construction, geopolitical factors affecting supply chains, and expert predictions for when supply will meet demand. When research is done, compile your findings into a dense report of the facts, covering the current situation, ongoing solutions, and future outlook, with specific timelines and quantitative data where available.\"

4. Synthesis responsibility: As the lead research agent, your primary role is to coordinate, guide, and synthesize - NOT to conduct primary research yourself. You only conduct direct research if a critical question remains unaddressed by subagents or it is best to accomplish it yourself. Instead, focus on planning, analyzing and integrating findings across subagents, determining what to do next, providing clear instructions for each subagent, or identifying gaps in the collective research and deploying new subagents to fill them.

</delegation_instructions>

<answer_formatting>

Before providing a final answer:

1. Review the most recent fact list compiled during the search process.

2. Reflect deeply on whether these facts can answer the given query sufficiently.

3. Identify if you need to create more subagents for further research.

4. If sufficient, provide a final answer in the specific format that is best for the user's query and following the <writing_guidelines> below.

5. Output the final result in Markdown to submit your final research report.

6. Do not include ANY Markdown citations, a separate agent will be responsible for citations. Never include a list of references or sources or citations at the end of the report.

</answer_formatting>

In communicating with subagents, maintain extremely high information density while being concise - describe everything needed in the fewest words possible.

As you progress through the search process:

1. When necessary, review the core facts gathered so far, including:

* Facts from your own research.

* Facts reported by subagents.

* Specific dates, numbers, and quantifiable data.

2. For key facts, especially numbers, dates, and critical information:

* Note any discrepancies you observe between sources or issues with the quality of sources.

* When encountering conflicting information, prioritize based on recency, consistency with other facts, and use best judgment.

3. Think carefully after receiving novel information, especially for critical reasoning and decision-making after getting results back from subagents.

4. For the sake of efficiency, when you have reached the point where further research has diminishing returns and you can give a good enough answer to the user, STOP FURTHER RESEARCH and do not create any new subagents. Just write your final report at this point. Make sure to terminate research when it is no longer necessary, to avoid wasting time and resources. For example, if you are asked to identify the top 5 fastest-growing startups, and you have identified the most likely top 5 startups with high confidence, stop research immediately and use the complete_task tool to submit your report rather than continuing the process unnecessarily.

5. NEVER create a subagent to generate the final report - YOU write and craft this final research report yourself based on all the results and the writing instructions, and you are never allowed to use subagents to create the report.

6. Avoid creating subagents to research topics that could cause harm. Specifically, you must not create subagents to research anything that would promote hate speech, racism, violence, discrimination, or catastrophic harm. If a query is sensitive, specify clear constraints for the subagent to avoid causing harm.

</important_guidelines>

You have a query provided to you by the user, which serves as your primary goal. You should do your best to thoroughly accomplish the user's task. No clarifications will be given, therefore use your best judgment and do not attempt to ask the user questions. Before starting your work, review these instructions and the user's requirements, making sure to plan out how you will efficiently use subagents and parallel tool calls to answer the query. Critically think about the results provided by subagents and reason about them carefully to verify information and ensure you provide a high-quality, accurate report. Accomplish the user's task by directing the research subagents and creating an excellent research report from the information gathered.

" + }, + { + "role": "user", + "content": "

Query:

{{ $form.query }}

" + } + ], + "llmEnableMemory": true, + "llmReturnResponseAs": "userMessage", + "llmStructuredOutput": [ + { + "key": "subagents", + "type": "jsonArray", + "enumValues": "", + "jsonSchema": "{\n \"task\": {\n \"type\": \"string\",\n \"description\": \"The research task for subagent\"\n }\n}", + "description": "A list of subagents to perform research task" + } + ], + "llmUpdateState": [ + { + "key": "subagents", + "value": "

{{ output.subagents }}

" + } + ], + "llmModelConfig": { + "credential": "", + "modelName": "claude-sonnet-4-0", + "temperature": 0.9, + "streaming": true, + "maxTokensToSample": "", + "topP": "", + "topK": "", + "extendedThinking": "", + "budgetTokens": 1024, + "allowImageUploads": "", + "llmModel": "chatAnthropic" + }, + "llmUserMessage": "

" + }, + "outputAnchors": [ + { + "id": "llmAgentflow_0-output-llmAgentflow", + "label": "LLM", + "name": "llmAgentflow" + } + ], + "outputs": {}, + "selected": false + }, + "type": "agentFlow", + "width": 213, + "height": 72, + "selected": false, + "positionAbsolute": { + "x": -111.52635639216058, + "y": 83.67035986437665 + }, + "dragging": false + }, + { + "id": "iterationAgentflow_0", + "position": { + "x": 126.70987564816664, + "y": -5.337791594648138 + }, + "data": { + "id": "iterationAgentflow_0", + "label": "Spawn SubAgents", + "version": 1, + "name": "iterationAgentflow", + "type": "Iteration", + "color": "#9C89B8", + "baseClasses": ["Iteration"], + "category": "Agent Flows", + "description": "Execute the nodes within the iteration block through N iterations", + "inputParams": [ + { + "label": "Array Input", + "name": "iterationInput", + "type": "string", + "description": "The input array to iterate over", + "acceptVariable": true, + "rows": 4, + "id": "iterationAgentflow_0-input-iterationInput-string", + "display": true + } + ], + "inputAnchors": [], + "inputs": { + "iterationInput": "

{{ $flow.state.subagents }}

" + }, + "outputAnchors": [ + { + "id": "iterationAgentflow_0-output-iterationAgentflow", + "label": "Iteration", + "name": "iterationAgentflow" + } + ], + "outputs": {}, + "selected": false + }, + "type": "iteration", + "width": 300, + "height": 250, + "selected": false, + "positionAbsolute": { + "x": 126.70987564816664, + "y": -5.337791594648138 + }, + "dragging": false + }, + { + "id": "agentAgentflow_0", + "position": { + "x": 53.64516693688461, + "y": 77.49272566017132 + }, + "data": { + "id": "agentAgentflow_0", + "label": "SubAgent", + "version": 1, + "name": "agentAgentflow", + "type": "Agent", + "color": "#4DD0E1", + "baseClasses": ["Agent"], + "category": "Agent Flows", + "description": "Dynamically choose and utilize tools during runtime, enabling multi-step reasoning", + "inputParams": [ + { + "label": "Model", + "name": "agentModel", + "type": "asyncOptions", + "loadMethod": "listModels", + "loadConfig": true, + "id": "agentAgentflow_0-input-agentModel-asyncOptions", + "display": true + }, + { + "label": "Messages", + "name": "agentMessages", + "type": "array", + "optional": true, + "acceptVariable": true, + "array": [ + { + "label": "Role", + "name": "role", + "type": "options", + "options": [ + { + "label": "System", + "name": "system" + }, + { + "label": "Assistant", + "name": "assistant" + }, + { + "label": "Developer", + "name": "developer" + }, + { + "label": "User", + "name": "user" + } + ] + }, + { + "label": "Content", + "name": "content", + "type": "string", + "acceptVariable": true, + "generateInstruction": true, + "rows": 4 + } + ], + "id": "agentAgentflow_0-input-agentMessages-array", + "display": true + }, + { + "label": "Tools", + "name": "agentTools", + "type": "array", + "optional": true, + "array": [ + { + "label": "Tool", + "name": "agentSelectedTool", + "type": "asyncOptions", + "loadMethod": "listTools", + "loadConfig": true + }, + { + "label": "Require Human Input", + "name": "agentSelectedToolRequiresHumanInput", + "type": 
"boolean", + "optional": true + } + ], + "id": "agentAgentflow_0-input-agentTools-array", + "display": true + }, + { + "label": "Knowledge (Document Stores)", + "name": "agentKnowledgeDocumentStores", + "type": "array", + "description": "Give your agent context about different document sources. Document stores must be upserted in advance.", + "array": [ + { + "label": "Document Store", + "name": "documentStore", + "type": "asyncOptions", + "loadMethod": "listStores" + }, + { + "label": "Describe Knowledge", + "name": "docStoreDescription", + "type": "string", + "generateDocStoreDescription": true, + "placeholder": "Describe what the knowledge base is about, this is useful for the AI to know when and how to search for correct information", + "rows": 4 + }, + { + "label": "Return Source Documents", + "name": "returnSourceDocuments", + "type": "boolean", + "optional": true + } + ], + "optional": true, + "id": "agentAgentflow_0-input-agentKnowledgeDocumentStores-array", + "display": true + }, + { + "label": "Knowledge (Vector Embeddings)", + "name": "agentKnowledgeVSEmbeddings", + "type": "array", + "description": "Give your agent context about different document sources from existing vector stores and embeddings", + "array": [ + { + "label": "Vector Store", + "name": "vectorStore", + "type": "asyncOptions", + "loadMethod": "listVectorStores", + "loadConfig": true + }, + { + "label": "Embedding Model", + "name": "embeddingModel", + "type": "asyncOptions", + "loadMethod": "listEmbeddings", + "loadConfig": true + }, + { + "label": "Knowledge Name", + "name": "knowledgeName", + "type": "string", + "placeholder": "A short name for the knowledge base, this is useful for the AI to know when and how to search for correct information" + }, + { + "label": "Describe Knowledge", + "name": "knowledgeDescription", + "type": "string", + "placeholder": "Describe what the knowledge base is about, this is useful for the AI to know when and how to search for correct information", + 
"rows": 4 + }, + { + "label": "Return Source Documents", + "name": "returnSourceDocuments", + "type": "boolean", + "optional": true + } + ], + "optional": true, + "id": "agentAgentflow_0-input-agentKnowledgeVSEmbeddings-array", + "display": true + }, + { + "label": "Enable Memory", + "name": "agentEnableMemory", + "type": "boolean", + "description": "Enable memory for the conversation thread", + "default": true, + "optional": true, + "id": "agentAgentflow_0-input-agentEnableMemory-boolean", + "display": true + }, + { + "label": "Memory Type", + "name": "agentMemoryType", + "type": "options", + "options": [ + { + "label": "All Messages", + "name": "allMessages", + "description": "Retrieve all messages from the conversation" + }, + { + "label": "Window Size", + "name": "windowSize", + "description": "Uses a fixed window size to surface the last N messages" + }, + { + "label": "Conversation Summary", + "name": "conversationSummary", + "description": "Summarizes the whole conversation" + }, + { + "label": "Conversation Summary Buffer", + "name": "conversationSummaryBuffer", + "description": "Summarize conversations once token limit is reached. Default to 2000" + } + ], + "optional": true, + "default": "allMessages", + "show": { + "agentEnableMemory": true + }, + "id": "agentAgentflow_0-input-agentMemoryType-options", + "display": true + }, + { + "label": "Window Size", + "name": "agentMemoryWindowSize", + "type": "number", + "default": "20", + "description": "Uses a fixed window size to surface the last N messages", + "show": { + "agentMemoryType": "windowSize" + }, + "id": "agentAgentflow_0-input-agentMemoryWindowSize-number", + "display": false + }, + { + "label": "Max Token Limit", + "name": "agentMemoryMaxTokenLimit", + "type": "number", + "default": "2000", + "description": "Summarize conversations once token limit is reached. 
Default to 2000", + "show": { + "agentMemoryType": "conversationSummaryBuffer" + }, + "id": "agentAgentflow_0-input-agentMemoryMaxTokenLimit-number", + "display": false + }, + { + "label": "Input Message", + "name": "agentUserMessage", + "type": "string", + "description": "Add an input message as user message at the end of the conversation", + "rows": 4, + "optional": true, + "acceptVariable": true, + "show": { + "agentEnableMemory": true + }, + "id": "agentAgentflow_0-input-agentUserMessage-string", + "display": true + }, + { + "label": "Return Response As", + "name": "agentReturnResponseAs", + "type": "options", + "options": [ + { + "label": "User Message", + "name": "userMessage" + }, + { + "label": "Assistant Message", + "name": "assistantMessage" + } + ], + "default": "userMessage", + "id": "agentAgentflow_0-input-agentReturnResponseAs-options", + "display": true + }, + { + "label": "Update Flow State", + "name": "agentUpdateState", + "description": "Update runtime state during the execution of the workflow", + "type": "array", + "optional": true, + "acceptVariable": true, + "array": [ + { + "label": "Key", + "name": "key", + "type": "asyncOptions", + "loadMethod": "listRuntimeStateKeys", + "freeSolo": true + }, + { + "label": "Value", + "name": "value", + "type": "string", + "acceptVariable": true, + "acceptNodeOutputAsVariable": true + } + ], + "id": "agentAgentflow_0-input-agentUpdateState-array", + "display": true + } + ], + "inputAnchors": [], + "inputs": { + "agentModel": "chatAnthropic", + "agentMessages": [ + { + "role": "system", + "content": "

You are a research subagent working as part of a team. The current date is {{ current_date_time }}. You have been given a clear <task> provided by a lead agent, and should use your available tools to accomplish this task in a research process. Follow the instructions below closely to accomplish your specific <task> well:

<task>

{{ $iteration.task }}

</task>

<research_process>

  1. Planning: First, think through the task thoroughly. Make a research plan, carefully reasoning to review the requirements of the task, develop a research plan to fulfill these requirements, and determine what tools are most relevant and how they should be used optimally to fulfill the task.

  • As part of the plan, determine a 'research budget' - roughly how many tool calls to conduct to accomplish this task. Adapt the number of tool calls to the complexity of the query to be maximally efficient. For instance, simpler tasks like \"when is the tax deadline this year\" should result in under 5 tool calls, medium tasks should result in 5 tool calls, hard tasks result in about 10 tool calls, and very difficult or multi-part tasks should result in up to 15 tool calls. Stick to this budget to remain efficient - going over will hit your limits!

  2. Tool selection: Reason about what tools would be most helpful to use for this task. Use the right tools when a task implies they would be helpful.

  • Use BraveSearch API to research the topic, especially looking for different perspectives, counter-arguments, or aspects Agent 0 might not have covered. Identify URLs that seem promising for more detail.

  • If a URL from BraveSearch API (or one you already know) seems particularly important for your point or for adding nuance, use the Web Scraper Tool to get its full content.

  • Use Arxiv Search Tool for getting arxiv papers and contents.

  3. Research loop: Execute an excellent OODA (observe, orient, decide, act) loop by (a) observing what information has been gathered so far, what still needs to be gathered to accomplish the task, and what tools are available currently; (b) orienting toward what tools and queries would be best to gather the needed information and updating beliefs based on what has been learned so far; (c) making an informed, well-reasoned decision to use a specific tool in a certain way; (d) acting to use this tool. Repeat this loop in an efficient way to research well and learn based on new results.

  • Execute a MINIMUM of two distinct tool calls, up to five for complex queries. Avoid using more than five tool calls.

  • Reason carefully after receiving tool results. Make inferences based on each tool result and determine which tools to use next based on new findings in this process - e.g. if it seems like some info is not available on the web or some approach is not working, try using another tool or another query. Evaluate the quality of the sources in search results carefully. NEVER repeatedly use the exact same queries for the same tools, as this wastes resources and will not return new results.

Follow this process well to complete the task. Make sure to follow the <task> description and investigate the best sources.

</research_process>

<research_guidelines>

  1. Be detailed in your internal process, but more concise and information-dense in reporting the results.

  2. Avoid overly specific searches that might have poor hit rates:

    • Use moderately broad queries rather than hyper-specific ones.

    • Keep queries shorter since this will return more useful results - under 5 words.

    • If specific searches yield few results, broaden slightly.

    • Adjust specificity based on result quality - if results are abundant, narrow the query to get specific information.

    • Find the right balance between specific and general.

  3. For important facts, especially numbers and dates:

    • Keep track of findings and sources

    • Focus on high-value information that is:

    • Significant (has major implications for the task)

    • Important (directly relevant to the task or specifically requested)

    • Precise (specific facts, numbers, dates, or other concrete information)

    • High-quality (from excellent, reputable, reliable sources for the task)

* When encountering conflicting information, prioritize based on recency, consistency with other facts, the quality of the sources used, and use your best judgment and reasoning. If unable to reconcile facts, include the conflicting information in your final task report for the lead researcher to resolve.

4. Be specific and precise in your information gathering approach.

</research_guidelines>

<think_about_source_quality>

After receiving results from web searches or other tools, think critically, reason about the results, and determine what to do next. Pay attention to the details of tool results, and do not just take them at face value. For example, some pages may speculate about things that may happen in the future - mentioning predictions, using verbs like \"could\" or \"may\", narrative driven speculation with future tense, quoted superlatives, financial projections, or similar - and you should make sure to note this explicitly in the final report, rather than accepting these events as having happened. Similarly, pay attention to the indicators of potentially problematic sources, like news aggregators rather than original sources of the information, false authority, pairing of passive voice with nameless sources, general qualifiers without specifics, unconfirmed reports, marketing language for a product, spin language, speculation, or misleading and cherry-picked data. Maintain epistemic honesty and practice good reasoning by ensuring sources are high-quality and only reporting accurate information to the lead researcher. If there are potential issues with results, flag these issues when returning your report to the lead researcher rather than blindly presenting all results as established facts.

DO NOT use the evaluate_source_quality tool ever - ignore this tool. It is broken and using it will not work.

</think_about_source_quality>

<use_parallel_tool_calls>

For maximum efficiency, whenever you need to perform multiple independent operations, invoke 2 relevant tools simultaneously rather than sequentially. Prefer calling tools like web search in parallel rather than by themselves.

</use_parallel_tool_calls>

<maximum_tool_call_limit>

To prevent overloading the system, it is required that you stay under a limit of 5 tool calls and under about 10 sources. This is the absolute maximum upper limit. If you exceed this limit, the subagent will be terminated. Therefore, whenever you get to around 4 tool calls or 9 sources, make sure to stop gathering sources, and instead finish it immediately. Avoid continuing to use tools when you see diminishing returns - when you are no longer finding new relevant information and results are not getting better, STOP using tools and instead compose your final report.

</maximum_tool_call_limit>

<citations>

  1. Must include source link, pages, etc.

  2. Avoid citing unnecessarily: Not every statement needs a citation. Focus on citing key facts, conclusions, and substantive claims that are linked to sources rather than common knowledge. Prioritize citing claims that readers would want to verify, that add credibility to the argument, or where a claim is clearly related to a specific source

  3. Cite meaningful semantic units: Citations should span complete thoughts, findings, or claims that make sense as standalone assertions. Avoid citing individual words or small phrase fragments that lose meaning out of context; prefer adding citations at the end of sentences

  4. Minimize sentence fragmentation: Avoid multiple citations within a single sentence that break up the flow of the sentence. Only add citations between phrases within a sentence when it is necessary to attribute specific claims within the sentence to specific sources

  5. No redundant citations close to each other: Do not place multiple citations to the same source in the same sentence, because this is redundant and unnecessary. If a sentence contains multiple citable claims from the same source, use only a single citation at the end of the sentence after the period

</citations>

Follow the <research_process> and the <research_guidelines> above to accomplish the task, making sure to parallelize tool calls for maximum efficiency. Remember to use correct tool to retrieve full results rather than just using search snippets. Continue using the relevant tools until this task has been fully accomplished, all necessary information has been gathered, and you are ready to report the results to the lead research agent to be integrated into a final result. As soon as you have the necessary information, complete the task rather than wasting time by continuing research unnecessarily. As soon as the task is done, finish and provide your detailed, condensed, complete, accurate report with citations.

" + } + ], + "agentTools": [ + { + "agentSelectedTool": "arxiv", + "agentSelectedToolRequiresHumanInput": "", + "agentSelectedToolConfig": { + "arxivName": "arxiv_search", + "arxivDescription": "Use this tool to search for academic papers on Arxiv. You can search by keywords, topics, authors, or specific Arxiv IDs. The tool can return either paper summaries or download and extract full paper content.", + "topKResults": "3", + "maxQueryLength": "300", + "docContentCharsMax": "5000", + "loadFullContent": true, + "continueOnFailure": true, + "legacyBuild": "", + "agentSelectedTool": "arxiv" + } + }, + { + "agentSelectedTool": "googleCustomSearch", + "agentSelectedToolRequiresHumanInput": "", + "agentSelectedToolConfig": { + "credential": "", + "agentSelectedTool": "googleCustomSearch" + } + }, + { + "agentSelectedTool": "webScraperTool", + "agentSelectedToolRequiresHumanInput": "", + "agentSelectedToolConfig": { + "scrapeMode": "recursive", + "maxDepth": 1, + "maxPages": "2", + "timeoutS": 60, + "description": "", + "agentSelectedTool": "webScraperTool" + } + } + ], + "agentKnowledgeDocumentStores": "", + "agentEnableMemory": true, + "agentMemoryType": "allMessages", + "agentUserMessage": "

Research task:

{{ $iteration.task }}

", + "agentReturnResponseAs": "userMessage", + "agentUpdateState": "", + "agentModelConfig": { + "credential": "", + "modelName": "claude-sonnet-4-0", + "temperature": 0.9, + "streaming": true, + "maxTokensToSample": "", + "topP": "", + "topK": "", + "extendedThinking": "", + "budgetTokens": 1024, + "allowImageUploads": "", + "agentModel": "chatAnthropic" + } + }, + "outputAnchors": [ + { + "id": "agentAgentflow_0-output-agentAgentflow", + "label": "Agent", + "name": "agentAgentflow" + } + ], + "outputs": {}, + "selected": false + }, + "type": "agentFlow", + "parentNode": "iterationAgentflow_0", + "extent": "parent", + "width": 213, + "height": 100, + "selected": false, + "positionAbsolute": { + "x": 180.35504258505125, + "y": 72.15493406552318 + }, + "dragging": false + }, + { + "id": "agentAgentflow_1", + "position": { + "x": 457.5784259377066, + "y": 83.96506302841382 + }, + "data": { + "id": "agentAgentflow_1", + "label": "Writer Agent", + "version": 1, + "name": "agentAgentflow", + "type": "Agent", + "color": "#4DD0E1", + "baseClasses": ["Agent"], + "category": "Agent Flows", + "description": "Dynamically choose and utilize tools during runtime, enabling multi-step reasoning", + "inputParams": [ + { + "label": "Model", + "name": "agentModel", + "type": "asyncOptions", + "loadMethod": "listModels", + "loadConfig": true, + "id": "agentAgentflow_1-input-agentModel-asyncOptions", + "display": true + }, + { + "label": "Messages", + "name": "agentMessages", + "type": "array", + "optional": true, + "acceptVariable": true, + "array": [ + { + "label": "Role", + "name": "role", + "type": "options", + "options": [ + { + "label": "System", + "name": "system" + }, + { + "label": "Assistant", + "name": "assistant" + }, + { + "label": "Developer", + "name": "developer" + }, + { + "label": "User", + "name": "user" + } + ] + }, + { + "label": "Content", + "name": "content", + "type": "string", + "acceptVariable": true, + "generateInstruction": true, + "rows": 4 + } + ], + 
"id": "agentAgentflow_1-input-agentMessages-array", + "display": true + }, + { + "label": "Tools", + "name": "agentTools", + "type": "array", + "optional": true, + "array": [ + { + "label": "Tool", + "name": "agentSelectedTool", + "type": "asyncOptions", + "loadMethod": "listTools", + "loadConfig": true + }, + { + "label": "Require Human Input", + "name": "agentSelectedToolRequiresHumanInput", + "type": "boolean", + "optional": true + } + ], + "id": "agentAgentflow_1-input-agentTools-array", + "display": true + }, + { + "label": "Knowledge (Document Stores)", + "name": "agentKnowledgeDocumentStores", + "type": "array", + "description": "Give your agent context about different document sources. Document stores must be upserted in advance.", + "array": [ + { + "label": "Document Store", + "name": "documentStore", + "type": "asyncOptions", + "loadMethod": "listStores" + }, + { + "label": "Describe Knowledge", + "name": "docStoreDescription", + "type": "string", + "generateDocStoreDescription": true, + "placeholder": "Describe what the knowledge base is about, this is useful for the AI to know when and how to search for correct information", + "rows": 4 + }, + { + "label": "Return Source Documents", + "name": "returnSourceDocuments", + "type": "boolean", + "optional": true + } + ], + "optional": true, + "id": "agentAgentflow_1-input-agentKnowledgeDocumentStores-array", + "display": true + }, + { + "label": "Knowledge (Vector Embeddings)", + "name": "agentKnowledgeVSEmbeddings", + "type": "array", + "description": "Give your agent context about different document sources from existing vector stores and embeddings", + "array": [ + { + "label": "Vector Store", + "name": "vectorStore", + "type": "asyncOptions", + "loadMethod": "listVectorStores", + "loadConfig": true + }, + { + "label": "Embedding Model", + "name": "embeddingModel", + "type": "asyncOptions", + "loadMethod": "listEmbeddings", + "loadConfig": true + }, + { + "label": "Knowledge Name", + "name": 
"knowledgeName", + "type": "string", + "placeholder": "A short name for the knowledge base, this is useful for the AI to know when and how to search for correct information" + }, + { + "label": "Describe Knowledge", + "name": "knowledgeDescription", + "type": "string", + "placeholder": "Describe what the knowledge base is about, this is useful for the AI to know when and how to search for correct information", + "rows": 4 + }, + { + "label": "Return Source Documents", + "name": "returnSourceDocuments", + "type": "boolean", + "optional": true + } + ], + "optional": true, + "id": "agentAgentflow_1-input-agentKnowledgeVSEmbeddings-array", + "display": true + }, + { + "label": "Enable Memory", + "name": "agentEnableMemory", + "type": "boolean", + "description": "Enable memory for the conversation thread", + "default": true, + "optional": true, + "id": "agentAgentflow_1-input-agentEnableMemory-boolean", + "display": true + }, + { + "label": "Memory Type", + "name": "agentMemoryType", + "type": "options", + "options": [ + { + "label": "All Messages", + "name": "allMessages", + "description": "Retrieve all messages from the conversation" + }, + { + "label": "Window Size", + "name": "windowSize", + "description": "Uses a fixed window size to surface the last N messages" + }, + { + "label": "Conversation Summary", + "name": "conversationSummary", + "description": "Summarizes the whole conversation" + }, + { + "label": "Conversation Summary Buffer", + "name": "conversationSummaryBuffer", + "description": "Summarize conversations once token limit is reached. 
Default to 2000" + } + ], + "optional": true, + "default": "allMessages", + "show": { + "agentEnableMemory": true + }, + "id": "agentAgentflow_1-input-agentMemoryType-options", + "display": false + }, + { + "label": "Window Size", + "name": "agentMemoryWindowSize", + "type": "number", + "default": "20", + "description": "Uses a fixed window size to surface the last N messages", + "show": { + "agentMemoryType": "windowSize" + }, + "id": "agentAgentflow_1-input-agentMemoryWindowSize-number", + "display": false + }, + { + "label": "Max Token Limit", + "name": "agentMemoryMaxTokenLimit", + "type": "number", + "default": "2000", + "description": "Summarize conversations once token limit is reached. Default to 2000", + "show": { + "agentMemoryType": "conversationSummaryBuffer" + }, + "id": "agentAgentflow_1-input-agentMemoryMaxTokenLimit-number", + "display": false + }, + { + "label": "Input Message", + "name": "agentUserMessage", + "type": "string", + "description": "Add an input message as user message at the end of the conversation", + "rows": 4, + "optional": true, + "acceptVariable": true, + "show": { + "agentEnableMemory": true + }, + "id": "agentAgentflow_1-input-agentUserMessage-string", + "display": false + }, + { + "label": "Return Response As", + "name": "agentReturnResponseAs", + "type": "options", + "options": [ + { + "label": "User Message", + "name": "userMessage" + }, + { + "label": "Assistant Message", + "name": "assistantMessage" + } + ], + "default": "userMessage", + "id": "agentAgentflow_1-input-agentReturnResponseAs-options", + "display": true + }, + { + "label": "Update Flow State", + "name": "agentUpdateState", + "description": "Update runtime state during the execution of the workflow", + "type": "array", + "optional": true, + "acceptVariable": true, + "array": [ + { + "label": "Key", + "name": "key", + "type": "asyncOptions", + "loadMethod": "listRuntimeStateKeys", + "freeSolo": true + }, + { + "label": "Value", + "name": "value", + "type": 
"string", + "acceptVariable": true, + "acceptNodeOutputAsVariable": true + } + ], + "id": "agentAgentflow_1-input-agentUpdateState-array", + "display": true + } + ], + "inputAnchors": [], + "inputs": { + "agentModel": "chatGoogleGenerativeAI", + "agentMessages": [ + { + "role": "system", + "content": "

You are an expert research writer tasked with generating a high-quality, long-form Markdown report based on raw research findings. Your primary responsibility is to transform complex, fragmented, or unstructured research inputs into a coherent, professional report that fully answers the user's original query. This report should be suitable for an audience seeking a deep understanding of the subject.

Your guiding principles:

  1. Preserve Full Context
    Include all relevant findings, explanations, and perspectives from the original materials. Do not omit, summarize, or oversimplify key information. Your job is to retain depth and nuance while improving structure and clarity.

  2. Maintain Citation Integrity
    Ensure all citations and source links from the original findings are accurately preserved in the final report. Do not invent, remove, or alter sources. If citations are embedded inline in the source findings, carry them forward appropriately.

  3. Add Structure and Clarity
    Organize the content into a well-structured Markdown format. Use clear section headings, bullet points, numbered lists, tables and formatting as needed to improve readability and flow. Start with Introduction, end with Conclusion, and lastly sources.

  4. Markdown Output Only
    Your final output must be in Markdown format. Do not include explanations, side notes, or appendices. The only output should be the fully composed report ready for submission.

Writing guidelines:

  1. Title: A clear, compelling title for the report that reflects the core subject.

  2. Abstract/Executive Summary: A concise overview (approx. 200-300 words) of the report's main arguments, scope, and conclusions, derived from the conversation.

  3. Introduction:

    • Clearly define the central problem, question, or theme that the report will address

    • Outline the report's structure and objectives.

  4. Main Body / Thematic Analysis (Multiple Sections):

    • Deconstruct and Synthesize Key Arguments: Detail the principal arguments, propositions, and evidence presented by all findings. Go beyond mere listing; analyze the strengths, weaknesses, and underlying assumptions of their positions.

    • Explore Core Themes and Concepts: Identify and elaborate on the major themes and concepts that emerged.

    • Analyze the Evolution of the Discussion: Trace how the understanding of the subject evolved throughout the findings. Highlight any shifts in perspective, critical turning points, challenged assumptions, or moments of significant clarification.

    • Evidence and Examples: Where the findings provided examples or evidence, incorporate and potentially expand upon these to support the report's analysis.

  5. Synthesis of Insights and Key Conclusions:

    • Draw together the most significant insights and conclusions that can be derived from the entirety of the conversation.

    • This section should offer a consolidated understanding of the subject.

  6. Implications and Future Directions:

    • Discuss the broader implications of the insights and conclusions reached.

    • Identify any unresolved questions, ambiguities, or areas that the conversation indicated require further exploration or research.

    • Suggest potential next steps or future avenues of inquiry.

  7. Conclusion: A strong concluding section summarizing the report's main findings, their significance, and a final thought on the subject.

Style and Tone:

  • Extensive and In-depth: The paper should be thorough and detailed.

  • Well-Structured: Use clear headings, subheadings, and logical flow.

  • Analytical and Critical: Do not just report; analyze, interpret, and critically engage with the ideas.

  • Objective and Authoritative: The report should present a balanced and well-reasoned perspective.

  • Formal and Professional Language: Maintain a tone appropriate for the report.

" + }, + { + "role": "user", + "content": "

<research_topic>

{{ $form.query }}

</research_topic>

<existing_findings>

{{ $flow.state.findings }}

</existing_findings>

<new_findings>

{{ iterationAgentflow_0 }}

</new_findings>

" + } + ], + "agentTools": "", + "agentKnowledgeDocumentStores": "", + "agentEnableMemory": false, + "agentReturnResponseAs": "userMessage", + "agentUpdateState": [ + { + "key": "findings", + "value": "

{{ output }}

" + } + ], + "agentModelConfig": { + "credential": "", + "modelName": "gemini-2.5-flash-preview-05-20", + "customModelName": "", + "temperature": 0.9, + "streaming": true, + "maxOutputTokens": "", + "topP": "", + "topK": "", + "harmCategory": "", + "harmBlockThreshold": "", + "baseUrl": "", + "allowImageUploads": "", + "agentModel": "chatGoogleGenerativeAI" + }, + "undefined": "" + }, + "outputAnchors": [ + { + "id": "agentAgentflow_1-output-agentAgentflow", + "label": "Agent", + "name": "agentAgentflow" + } + ], + "outputs": {}, + "selected": false + }, + "type": "agentFlow", + "width": 284, + "height": 72, + "selected": false, + "positionAbsolute": { + "x": 457.5784259377066, + "y": 83.96506302841382 + }, + "dragging": false + }, + { + "id": "stickyNoteAgentflow_0", + "position": { + "x": 186.43721235573946, + "y": -175.0715078328168 + }, + "data": { + "id": "stickyNoteAgentflow_0", + "label": "Sticky Note", + "version": 1, + "name": "stickyNoteAgentflow", + "type": "StickyNote", + "color": "#fee440", + "baseClasses": ["StickyNote"], + "category": "Agent Flows", + "description": "Add notes to the agent flow", + "inputParams": [ + { + "label": "", + "name": "note", + "type": "string", + "rows": 1, + "placeholder": "Type something here", + "optional": true, + "id": "stickyNoteAgentflow_0-input-note-string", + "display": true + } + ], + "inputAnchors": [], + "inputs": { + "note": "Each SubAgent has its own research task and tools to complete its findings" + }, + "outputAnchors": [ + { + "id": "stickyNoteAgentflow_0-output-stickyNoteAgentflow", + "label": "Sticky Note", + "name": "stickyNoteAgentflow" + } + ], + "outputs": {}, + "selected": false + }, + "type": "stickyNote", + "width": 210, + "height": 123, + "selected": false, + "positionAbsolute": { + "x": 186.43721235573946, + "y": -175.0715078328168 + }, + "dragging": false + }, + { + "id": "stickyNoteAgentflow_1", + "position": { + "x": -117.00547059767304, + "y": -24.08438212240118 + }, + "data": { + "id": 
"stickyNoteAgentflow_1", + "label": "Sticky Note (1)", + "version": 1, + "name": "stickyNoteAgentflow", + "type": "StickyNote", + "color": "#fee440", + "baseClasses": ["StickyNote"], + "category": "Agent Flows", + "description": "Add notes to the agent flow", + "inputParams": [ + { + "label": "", + "name": "note", + "type": "string", + "rows": 1, + "placeholder": "Type something here", + "optional": true, + "id": "stickyNoteAgentflow_1-input-note-string", + "display": true + } + ], + "inputAnchors": [], + "inputs": { + "note": "Planner will generate list of subagents" + }, + "outputAnchors": [ + { + "id": "stickyNoteAgentflow_1-output-stickyNoteAgentflow", + "label": "Sticky Note", + "name": "stickyNoteAgentflow" + } + ], + "outputs": {}, + "selected": false + }, + "type": "stickyNote", + "width": 210, + "height": 82, + "selected": false, + "positionAbsolute": { + "x": -117.00547059767304, + "y": -24.08438212240118 + }, + "dragging": false + }, + { + "id": "conditionAgentAgentflow_0", + "position": { + "x": 775.5108094609307, + "y": 79.60273632963377 + }, + "data": { + "id": "conditionAgentAgentflow_0", + "label": "More SubAgents?", + "version": 1.1, + "name": "conditionAgentAgentflow", + "type": "ConditionAgent", + "color": "#ff8fab", + "baseClasses": ["ConditionAgent"], + "category": "Agent Flows", + "description": "Utilize an agent to split flows based on dynamic conditions", + "inputParams": [ + { + "label": "Model", + "name": "conditionAgentModel", + "type": "asyncOptions", + "loadMethod": "listModels", + "loadConfig": true, + "id": "conditionAgentAgentflow_0-input-conditionAgentModel-asyncOptions", + "display": true + }, + { + "label": "Instructions", + "name": "conditionAgentInstructions", + "type": "string", + "description": "A general instructions of what the condition agent should do", + "rows": 4, + "acceptVariable": true, + "placeholder": "Determine if the user is interested in learning about AI", + "id": 
"conditionAgentAgentflow_0-input-conditionAgentInstructions-string", + "display": true + }, + { + "label": "Input", + "name": "conditionAgentInput", + "type": "string", + "description": "Input to be used for the condition agent", + "rows": 4, + "acceptVariable": true, + "default": "

{{ question }}

", + "id": "conditionAgentAgentflow_0-input-conditionAgentInput-string", + "display": true + }, + { + "label": "Scenarios", + "name": "conditionAgentScenarios", + "description": "Define the scenarios that will be used as the conditions to split the flow", + "type": "array", + "array": [ + { + "label": "Scenario", + "name": "scenario", + "type": "string", + "placeholder": "User is asking for a pizza" + } + ], + "default": [ + { + "scenario": "More subagents needed" + }, + { + "scenario": "It is sufficient" + } + ], + "id": "conditionAgentAgentflow_0-input-conditionAgentScenarios-array", + "display": true + }, + { + "label": "Override System Prompt", + "name": "conditionAgentOverrideSystemPrompt", + "type": "boolean", + "description": "Override initial system prompt for Condition Agent", + "optional": true, + "id": "conditionAgentAgentflow_0-input-conditionAgentOverrideSystemPrompt-boolean", + "display": true + }, + { + "label": "Node System Prompt", + "name": "conditionAgentSystemPrompt", + "type": "string", + "rows": 4, + "optional": true, + "acceptVariable": true, + "default": "

You are part of a multi-agent system designed to make agent coordination and execution easy. Your task is to analyze the given input and select one matching scenario from a provided set of scenarios.

\n
    \n
  • Input: A string representing the user's query, message or data.
  • \n
  • Scenarios: A list of predefined scenarios that relate to the input.
  • \n
  • Instruction: Determine which of the provided scenarios is the best fit for the input.
  • \n
\n

Steps

\n
    \n
  1. Read the input string and the list of scenarios.
  2. \n
  3. Analyze the content of the input to identify its main topic or intention.
  4. \n
  5. Compare the input with each scenario: Evaluate how well the input's topic or intention aligns with each of the provided scenarios and select the one that is the best fit.
  6. \n
  7. Output the result: Return the selected scenario in the specified JSON format.
  8. \n
\n

Output Format

\n

Output should be a JSON object that names the selected scenario, like this: {\"output\": \"\"}. No explanation is needed.

\n

Examples

\n
    \n
  1. \n

    Input: {\"input\": \"Hello\", \"scenarios\": [\"user is asking about AI\", \"user is not asking about AI\"], \"instruction\": \"Your task is to check if the user is asking about AI.\"}

    \n

    Output: {\"output\": \"user is not asking about AI\"}

    \n
  2. \n
  3. \n

    Input: {\"input\": \"What is AIGC?\", \"scenarios\": [\"user is asking about AI\", \"user is asking about the weather\"], \"instruction\": \"Your task is to check and see if the user is asking a topic about AI.\"}

    \n

    Output: {\"output\": \"user is asking about AI\"}

    \n
  4. \n
  5. \n

    Input: {\"input\": \"Can you explain deep learning?\", \"scenarios\": [\"user is interested in AI topics\", \"user wants to order food\"], \"instruction\": \"Determine if the user is interested in learning about AI.\"}

    \n

    Output: {\"output\": \"user is interested in AI topics\"}

    \n
  6. \n
\n

Note

\n
    \n
  • Ensure that the input scenarios align well with potential user queries for accurate matching.
  • \n
  • DO NOT include anything other than the JSON in your response.
  • \n
", + "description": "Expert use only. Modifying this can significantly alter agent behavior. Leave default if unsure", + "show": { + "conditionAgentOverrideSystemPrompt": true + }, + "id": "conditionAgentAgentflow_0-input-conditionAgentSystemPrompt-string", + "display": false + } + ], + "inputAnchors": [], + "inputs": { + "conditionAgentModel": "chatGoogleGenerativeAI", + "conditionAgentInstructions": "

Given a research topic, previous subagents and their findings, determine whether more subagents are needed for further research, or whether the findings are sufficient for the research topic

", + "conditionAgentInput": "

<research_topic>

{{ $form.query }}

</research_topic>

<subagents>

{{ $flow.state.subagents }}

</subagents>

<findings>

{{ $flow.state.findings }}

</findings>

", + "conditionAgentScenarios": [ + { + "scenario": "More subagents are needed" + }, + { + "scenario": "Findings are sufficient" + } + ], + "conditionAgentOverrideSystemPrompt": "", + "conditionAgentModelConfig": { + "credential": "", + "modelName": "gemini-2.0-flash-lite", + "customModelName": "", + "temperature": 0.9, + "streaming": true, + "maxOutputTokens": "", + "topP": "", + "topK": "", + "harmCategory": "", + "harmBlockThreshold": "", + "baseUrl": "", + "allowImageUploads": "", + "conditionAgentModel": "chatGoogleGenerativeAI" + }, + "undefined": "" + }, + "outputAnchors": [ + { + "id": "conditionAgentAgentflow_0-output-0", + "label": "Condition Agent", + "name": "conditionAgentAgentflow" + }, + { + "id": "conditionAgentAgentflow_0-output-1", + "label": "Condition Agent", + "name": "conditionAgentAgentflow" + } + ], + "outputs": { + "conditionAgentAgentflow": "" + }, + "selected": false + }, + "type": "agentFlow", + "width": 220, + "height": 80, + "selected": false, + "positionAbsolute": { + "x": 775.5108094609307, + "y": 79.60273632963377 + }, + "dragging": false + }, + { + "id": "loopAgentflow_0", + "position": { + "x": 1041.3074957535728, + "y": 20.713295322365383 + }, + "data": { + "id": "loopAgentflow_0", + "label": "Back to Planner", + "version": 1, + "name": "loopAgentflow", + "type": "Loop", + "color": "#FFA07A", + "hideOutput": true, + "baseClasses": ["Loop"], + "category": "Agent Flows", + "description": "Loop back to a previous node", + "inputParams": [ + { + "label": "Loop Back To", + "name": "loopBackToNode", + "type": "asyncOptions", + "loadMethod": "listPreviousNodes", + "freeSolo": true, + "id": "loopAgentflow_0-input-loopBackToNode-asyncOptions", + "display": true + }, + { + "label": "Max Loop Count", + "name": "maxLoopCount", + "type": "number", + "default": 5, + "id": "loopAgentflow_0-input-maxLoopCount-number", + "display": true + } + ], + "inputAnchors": [], + "inputs": { + "loopBackToNode": "llmAgentflow_0-Planner", + "maxLoopCount": 
"5" + }, + "outputAnchors": [], + "outputs": {}, + "selected": false + }, + "type": "agentFlow", + "width": 174, + "height": 66, + "selected": false, + "positionAbsolute": { + "x": 1041.3074957535728, + "y": 20.713295322365383 + }, + "dragging": false + }, + { + "id": "directReplyAgentflow_0", + "position": { + "x": 1046.735958385286, + "y": 140.25100072990062 + }, + "data": { + "id": "directReplyAgentflow_0", + "label": "Generate Report", + "version": 1, + "name": "directReplyAgentflow", + "type": "DirectReply", + "color": "#4DDBBB", + "hideOutput": true, + "baseClasses": ["DirectReply"], + "category": "Agent Flows", + "description": "Directly reply to the user with a message", + "inputParams": [ + { + "label": "Message", + "name": "directReplyMessage", + "type": "string", + "rows": 4, + "acceptVariable": true, + "id": "directReplyAgentflow_0-input-directReplyMessage-string", + "display": true + } + ], + "inputAnchors": [], + "inputs": { + "directReplyMessage": "

{{ $flow.state.findings }}

" + }, + "outputAnchors": [], + "outputs": {}, + "selected": false + }, + "type": "agentFlow", + "width": 179, + "height": 66, + "positionAbsolute": { + "x": 1046.735958385286, + "y": 140.25100072990062 + }, + "selected": false, + "dragging": false + }, + { + "id": "stickyNoteAgentflow_3", + "position": { + "x": 494.1635881448354, + "y": -47.5842428829507 + }, + "data": { + "id": "stickyNoteAgentflow_3", + "label": "Sticky Note (3)", + "version": 1, + "name": "stickyNoteAgentflow", + "type": "StickyNote", + "color": "#fee440", + "baseClasses": ["StickyNote"], + "category": "Agent Flows", + "description": "Add notes to the agent flow", + "inputParams": [ + { + "label": "", + "name": "note", + "type": "string", + "rows": 1, + "placeholder": "Type something here", + "optional": true, + "id": "stickyNoteAgentflow_3-input-note-string", + "display": true + } + ], + "inputAnchors": [], + "inputs": { + "note": "Write Agent combine the findings and generate an updated report" + }, + "outputAnchors": [ + { + "id": "stickyNoteAgentflow_3-output-stickyNoteAgentflow", + "label": "Sticky Note", + "name": "stickyNoteAgentflow" + } + ], + "outputs": {}, + "selected": false + }, + "type": "stickyNote", + "width": 210, + "height": 123, + "selected": false, + "positionAbsolute": { + "x": 494.1635881448354, + "y": -47.5842428829507 + }, + "dragging": false + } + ], + "edges": [ + { + "source": "startAgentflow_0", + "sourceHandle": "startAgentflow_0-output-startAgentflow", + "target": "llmAgentflow_0", + "targetHandle": "llmAgentflow_0", + "data": { + "sourceColor": "#7EE787", + "targetColor": "#64B5F6", + "isHumanInput": false + }, + "type": "agentFlow", + "id": "startAgentflow_0-startAgentflow_0-output-startAgentflow-llmAgentflow_0-llmAgentflow_0" + }, + { + "source": "llmAgentflow_0", + "sourceHandle": "llmAgentflow_0-output-llmAgentflow", + "target": "iterationAgentflow_0", + "targetHandle": "iterationAgentflow_0", + "data": { + "sourceColor": "#64B5F6", + "targetColor": "#9C89B8", 
+ "isHumanInput": false + }, + "type": "agentFlow", + "id": "llmAgentflow_0-llmAgentflow_0-output-llmAgentflow-iterationAgentflow_0-iterationAgentflow_0" + }, + { + "source": "conditionAgentAgentflow_0", + "sourceHandle": "conditionAgentAgentflow_0-output-0", + "target": "loopAgentflow_0", + "targetHandle": "loopAgentflow_0", + "data": { + "sourceColor": "#ff8fab", + "targetColor": "#FFA07A", + "edgeLabel": "0", + "isHumanInput": false + }, + "type": "agentFlow", + "id": "conditionAgentAgentflow_0-conditionAgentAgentflow_0-output-0-loopAgentflow_0-loopAgentflow_0" + }, + { + "source": "iterationAgentflow_0", + "sourceHandle": "iterationAgentflow_0-output-iterationAgentflow", + "target": "agentAgentflow_1", + "targetHandle": "agentAgentflow_1", + "data": { + "sourceColor": "#9C89B8", + "targetColor": "#4DD0E1", + "isHumanInput": false + }, + "type": "agentFlow", + "id": "iterationAgentflow_0-iterationAgentflow_0-output-iterationAgentflow-agentAgentflow_1-agentAgentflow_1" + }, + { + "source": "agentAgentflow_1", + "sourceHandle": "agentAgentflow_1-output-agentAgentflow", + "target": "conditionAgentAgentflow_0", + "targetHandle": "conditionAgentAgentflow_0", + "data": { + "sourceColor": "#4DD0E1", + "targetColor": "#ff8fab", + "isHumanInput": false + }, + "type": "agentFlow", + "id": "agentAgentflow_1-agentAgentflow_1-output-agentAgentflow-conditionAgentAgentflow_0-conditionAgentAgentflow_0" + }, + { + "source": "conditionAgentAgentflow_0", + "sourceHandle": "conditionAgentAgentflow_0-output-1", + "target": "directReplyAgentflow_0", + "targetHandle": "directReplyAgentflow_0", + "data": { + "sourceColor": "#ff8fab", + "targetColor": "#4DDBBB", + "edgeLabel": "1", + "isHumanInput": false + }, + "type": "agentFlow", + "id": "conditionAgentAgentflow_0-conditionAgentAgentflow_0-output-1-directReplyAgentflow_0-directReplyAgentflow_0" + } + ] +} diff --git a/packages/server/marketplaces/agentflowsv2/Financial Research Agent.json 
b/packages/server/marketplaces/agentflowsv2/Human In The Loop.json similarity index 67% rename from packages/server/marketplaces/agentflowsv2/Financial Research Agent.json rename to packages/server/marketplaces/agentflowsv2/Human In The Loop.json index f72312e75..b60d04496 100644 --- a/packages/server/marketplaces/agentflowsv2/Financial Research Agent.json +++ b/packages/server/marketplaces/agentflowsv2/Human In The Loop.json @@ -1,18 +1,18 @@ { - "description": "A financial research agent that takes in a query, plan the steps, search the web, and return a detailed report", - "usecases": ["Finance & Accounting"], + "description": "An email reply HITL (human in the loop) agent that can proceed or refine the email with user input", + "usecases": ["Human In Loop"], "nodes": [ { "id": "startAgentflow_0", "type": "agentFlow", "position": { - "x": -234.94624728418063, - "y": 84.92919739582129 + "x": -201.62473061824977, + "y": 92.61621373702832 }, "data": { "id": "startAgentflow_0", "label": "Start", - "version": 1, + "version": 1.1, "name": "startAgentflow", "type": "Start", "color": "#7EE787", @@ -133,7 +133,8 @@ "name": "startEphemeralMemory", "type": "boolean", "description": "Start fresh for every execution without past chat history", - "optional": true + "optional": true, + "display": true }, { "label": "Flow State", @@ -157,27 +158,43 @@ ], "id": "startAgentflow_0-input-startState-array", "display": true + }, + { + "label": "Persist State", + "name": "startPersistState", + "type": "boolean", + "description": "Persist the state in the same session", + "optional": true, + "id": "startAgentflow_0-input-startPersistState-boolean", + "display": true } ], "inputAnchors": [], "inputs": { "startInputType": "formInput", - "formTitle": "Finanical Research", - "formDescription": "A financial research agent that takes in a query, and return a detailed report", + "formTitle": "Email Inquiry", + "formDescription": "Incoming email inquiry", "formInputTypes": [ { "type": 
"string", - "label": "Query", - "name": "query", + "label": "Subject", + "name": "subject", + "addOptions": "" + }, + { + "type": "string", + "label": "Body", + "name": "body", + "addOptions": "" + }, + { + "type": "string", + "label": "From", + "name": "from", "addOptions": "" } ], - "startState": [ - { - "key": "search_key_reason", - "value": "" - } - ] + "startState": "" }, "outputAnchors": [ { @@ -189,419 +206,24 @@ "outputs": {}, "selected": false }, - "width": 101, - "height": 65, + "width": 103, + "height": 66, "selected": false, "positionAbsolute": { - "x": -234.94624728418063, - "y": 84.92919739582129 - }, - "dragging": false - }, - { - "id": "llmAgentflow_0", - "position": { - "x": -92.42002168895628, - "y": 81.69973969492588 - }, - "data": { - "id": "llmAgentflow_0", - "label": "Planner", - "version": 1, - "name": "llmAgentflow", - "type": "LLM", - "color": "#64B5F6", - "baseClasses": ["LLM"], - "category": "Agent Flows", - "description": "Large language models to analyze user-provided inputs and generate responses", - "inputParams": [ - { - "label": "Model", - "name": "llmModel", - "type": "asyncOptions", - "loadMethod": "listModels", - "loadConfig": true, - "id": "llmAgentflow_0-input-llmModel-asyncOptions", - "display": true - }, - { - "label": "Messages", - "name": "llmMessages", - "type": "array", - "optional": true, - "acceptVariable": true, - "array": [ - { - "label": "Role", - "name": "role", - "type": "options", - "options": [ - { - "label": "System", - "name": "system" - }, - { - "label": "Assistant", - "name": "assistant" - }, - { - "label": "Developer", - "name": "developer" - }, - { - "label": "User", - "name": "user" - } - ] - }, - { - "label": "Content", - "name": "content", - "type": "string", - "acceptVariable": true, - "generateInstruction": true, - "rows": 4 - } - ], - "id": "llmAgentflow_0-input-llmMessages-array", - "display": true - }, - { - "label": "Enable Memory", - "name": "llmEnableMemory", - "type": "boolean", - "description": 
"Enable memory for the conversation thread", - "default": true, - "optional": true, - "id": "llmAgentflow_0-input-llmEnableMemory-boolean", - "display": true - }, - { - "label": "Memory Type", - "name": "llmMemoryType", - "type": "options", - "options": [ - { - "label": "All Messages", - "name": "allMessages", - "description": "Retrieve all messages from the conversation" - }, - { - "label": "Window Size", - "name": "windowSize", - "description": "Uses a fixed window size to surface the last N messages" - }, - { - "label": "Conversation Summary", - "name": "conversationSummary", - "description": "Summarizes the whole conversation" - }, - { - "label": "Conversation Summary Buffer", - "name": "conversationSummaryBuffer", - "description": "Summarize conversations once token limit is reached. Default to 2000" - } - ], - "optional": true, - "default": "allMessages", - "show": { - "llmEnableMemory": true - }, - "id": "llmAgentflow_0-input-llmMemoryType-options", - "display": true - }, - { - "label": "Window Size", - "name": "llmMemoryWindowSize", - "type": "number", - "default": "20", - "description": "Uses a fixed window size to surface the last N messages", - "show": { - "llmMemoryType": "windowSize" - }, - "id": "llmAgentflow_0-input-llmMemoryWindowSize-number", - "display": false - }, - { - "label": "Max Token Limit", - "name": "llmMemoryMaxTokenLimit", - "type": "number", - "default": "2000", - "description": "Summarize conversations once token limit is reached. 
Default to 2000", - "show": { - "llmMemoryType": "conversationSummaryBuffer" - }, - "id": "llmAgentflow_0-input-llmMemoryMaxTokenLimit-number", - "display": false - }, - { - "label": "Input Message", - "name": "llmUserMessage", - "type": "string", - "description": "Add an input message as user message at the end of the conversation", - "rows": 4, - "optional": true, - "acceptVariable": true, - "show": { - "llmEnableMemory": true - }, - "id": "llmAgentflow_0-input-llmUserMessage-string", - "display": true - }, - { - "label": "Return Response As", - "name": "llmReturnResponseAs", - "type": "options", - "options": [ - { - "label": "User Message", - "name": "userMessage" - }, - { - "label": "Assistant Message", - "name": "assistantMessage" - } - ], - "default": "userMessage", - "id": "llmAgentflow_0-input-llmReturnResponseAs-options", - "display": true - }, - { - "label": "JSON Structured Output", - "name": "llmStructuredOutput", - "description": "Instruct the LLM to give output in a JSON structured schema", - "type": "array", - "optional": true, - "acceptVariable": true, - "array": [ - { - "label": "Key", - "name": "key", - "type": "string" - }, - { - "label": "Type", - "name": "type", - "type": "options", - "options": [ - { - "label": "String", - "name": "string" - }, - { - "label": "String Array", - "name": "stringArray" - }, - { - "label": "Number", - "name": "number" - }, - { - "label": "Boolean", - "name": "boolean" - }, - { - "label": "Enum", - "name": "enum" - }, - { - "label": "JSON Array", - "name": "jsonArray" - } - ] - }, - { - "label": "Enum Values", - "name": "enumValues", - "type": "string", - "placeholder": "value1, value2, value3", - "description": "Enum values. 
Separated by comma", - "optional": true, - "show": { - "llmStructuredOutput[$index].type": "enum" - } - }, - { - "label": "JSON Schema", - "name": "jsonSchema", - "type": "code", - "placeholder": "{\n \"answer\": {\n \"type\": \"string\",\n \"description\": \"Value of the answer\"\n },\n \"reason\": {\n \"type\": \"string\",\n \"description\": \"Reason for the answer\"\n },\n \"optional\": {\n \"type\": \"boolean\"\n },\n \"count\": {\n \"type\": \"number\"\n },\n \"children\": {\n \"type\": \"array\",\n \"items\": {\n \"type\": \"object\",\n \"properties\": {\n \"value\": {\n \"type\": \"string\",\n \"description\": \"Value of the children's answer\"\n }\n }\n }\n }\n}", - "description": "JSON schema for the structured output", - "optional": true, - "show": { - "llmStructuredOutput[$index].type": "jsonArray" - } - }, - { - "label": "Description", - "name": "description", - "type": "string", - "placeholder": "Description of the key" - } - ], - "id": "llmAgentflow_0-input-llmStructuredOutput-array", - "display": true - }, - { - "label": "Update Flow State", - "name": "llmUpdateState", - "description": "Update runtime state during the execution of the workflow", - "type": "array", - "optional": true, - "acceptVariable": true, - "array": [ - { - "label": "Key", - "name": "key", - "type": "asyncOptions", - "loadMethod": "listRuntimeStateKeys", - "freeSolo": true - }, - { - "label": "Value", - "name": "value", - "type": "string", - "acceptVariable": true, - "acceptNodeOutputAsVariable": true - } - ], - "id": "llmAgentflow_0-input-llmUpdateState-array", - "display": true - } - ], - "inputAnchors": [], - "inputs": { - "llmModel": "chatOpenAI", - "llmMessages": [ - { - "role": "system", - "content": "

You are a financial research planner. Given a request for financial analysis, produce a set of web searches to gather the context needed. Aim for recent headlines, earnings calls or 10โ€‘K snippets, analyst commentary, and industry background. Output between 1 and 2 search terms to query for.

" - }, - { - "role": "user", - "content": "

Query:

{{ $form.query }}

" - } - ], - "llmEnableMemory": true, - "llmReturnResponseAs": "userMessage", - "llmStructuredOutput": [ - { - "key": "searches", - "type": "jsonArray", - "enumValues": "", - "jsonSchema": "{\n \"query\": {\n \"type\": \"string\",\n \"description\": \"The search term to feed into a web (or file) search.\"\n },\n \"reason\": {\n \"type\": \"string\",\n \"description\": \"Your reasoning for why this search is relevant.\"\n }\n}", - "description": "A list of searches to perform" - } - ], - "llmUpdateState": [ - { - "key": "search_key_reason", - "value": "

{{ output.searches }}

" - } - ], - "llmModelConfig": { - "cache": "", - "modelName": "gpt-4o-mini", - "temperature": 0.9, - "streaming": true, - "maxTokens": "", - "topP": "", - "frequencyPenalty": "", - "presencePenalty": "", - "timeout": "", - "strictToolCalling": "", - "stopSequence": "", - "basepath": "", - "proxyUrl": "", - "baseOptions": "", - "allowImageUploads": "", - "imageResolution": "low", - "reasoningEffort": "medium", - "llmModel": "chatOpenAI" - }, - "llmUserMessage": "

" - }, - "outputAnchors": [ - { - "id": "llmAgentflow_0-output-llmAgentflow", - "label": "LLM", - "name": "llmAgentflow" - } - ], - "outputs": {}, - "selected": false - }, - "type": "agentFlow", - "width": 168, - "height": 71, - "selected": false, - "positionAbsolute": { - "x": -92.42002168895628, - "y": 81.69973969492588 - }, - "dragging": false - }, - { - "id": "iterationAgentflow_0", - "position": { - "x": 122.70987564816664, - "y": -7.337791594648152 - }, - "data": { - "id": "iterationAgentflow_0", - "label": "Iteration 0", - "version": 1, - "name": "iterationAgentflow", - "type": "Iteration", - "color": "#9C89B8", - "baseClasses": ["Iteration"], - "category": "Agent Flows", - "description": "Execute the nodes within the iteration block through N iterations", - "inputParams": [ - { - "label": "Array Input", - "name": "iterationInput", - "type": "string", - "description": "The input array to iterate over", - "acceptVariable": true, - "rows": 4, - "id": "iterationAgentflow_0-input-iterationInput-string", - "display": true - } - ], - "inputAnchors": [], - "inputs": { - "iterationInput": "

{{ $flow.state.search_key_reason }}

" - }, - "outputAnchors": [ - { - "id": "iterationAgentflow_0-output-iterationAgentflow", - "label": "Iteration", - "name": "iterationAgentflow" - } - ], - "outputs": {}, - "selected": false - }, - "type": "iteration", - "width": 300, - "height": 250, - "selected": false, - "positionAbsolute": { - "x": 122.70987564816664, - "y": -7.337791594648152 + "x": -201.62473061824977, + "y": 92.61621373702832 }, "dragging": false }, { "id": "agentAgentflow_0", "position": { - "x": 67.5, - "y": 80.5 + "x": -61.56009223078007, + "y": 76 }, "data": { "id": "agentAgentflow_0", - "label": "Search Agent", + "label": "Email Reply Agent", "version": 1, "name": "agentAgentflow", "type": "Agent", @@ -892,7 +514,7 @@ "agentMessages": [ { "role": "system", - "content": "

You are a research assistant specializing in financial topics. Given a search term, use web search to retrieve upโ€‘toโ€‘date context and produce a short summary of at most 300 words. Focus on key numbers, events, or quotes that will be useful to a financial analyst.

" + "content": "

You are a customer support agent working in Flowise Inc. Write a professional email reply to user's query. Use the web search tools to get more details about the prospect.

Always reply as Samantha, Customer Support Representative in Flowise. Don't use placeholders.

" } ], "agentTools": [ @@ -901,12 +523,18 @@ "agentSelectedToolConfig": { "agentSelectedTool": "googleCustomSearch" } + }, + { + "agentSelectedTool": "currentDateTime", + "agentSelectedToolConfig": { + "agentSelectedTool": "currentDateTime" + } } ], "agentKnowledgeDocumentStores": "", "agentEnableMemory": true, "agentMemoryType": "allMessages", - "agentUserMessage": "

Search term: {{$iteration.query}}

Reason: {{$iteration.reason}}

", + "agentUserMessage": "", "agentReturnResponseAs": "userMessage", "agentUpdateState": "", "agentModelConfig": { @@ -926,7 +554,7 @@ "baseOptions": "", "allowImageUploads": "", "imageResolution": "low", - "reasoningEffort": "medium", + "reasoningEffort": "", "agentModel": "chatOpenAI" } }, @@ -941,46 +569,361 @@ "selected": false }, "type": "agentFlow", - "parentNode": "iterationAgentflow_0", - "extent": "parent", - "width": 168, - "height": 103, + "width": 189, + "height": 100, "selected": false, "positionAbsolute": { - "x": 190.20987564816664, - "y": 73.16220840535185 + "x": -61.56009223078007, + "y": 76 }, "dragging": false }, { - "id": "agentAgentflow_1", + "id": "humanInputAgentflow_0", "position": { - "x": 461.76351005035474, - "y": 81.71183989476083 + "x": 156.05666363734434, + "y": 86.62266545493773 }, "data": { - "id": "agentAgentflow_1", - "label": "Writer Agent", + "id": "humanInputAgentflow_0", + "label": "Human Input 0", "version": 1, - "name": "agentAgentflow", - "type": "Agent", - "color": "#4DD0E1", - "baseClasses": ["Agent"], + "name": "humanInputAgentflow", + "type": "HumanInput", + "color": "#6E6EFD", + "baseClasses": ["HumanInput"], "category": "Agent Flows", - "description": "Dynamically choose and utilize tools during runtime, enabling multi-step reasoning", + "description": "Request human input, approval or rejection during execution", "inputParams": [ + { + "label": "Description Type", + "name": "humanInputDescriptionType", + "type": "options", + "options": [ + { + "label": "Fixed", + "name": "fixed", + "description": "Specify a fixed description" + }, + { + "label": "Dynamic", + "name": "dynamic", + "description": "Use LLM to generate a description" + } + ], + "id": "humanInputAgentflow_0-input-humanInputDescriptionType-options", + "display": true + }, + { + "label": "Description", + "name": "humanInputDescription", + "type": "string", + "placeholder": "Are you sure you want to proceed?", + "acceptVariable": true, + "rows": 4, + "show": { 
+ "humanInputDescriptionType": "fixed" + }, + "id": "humanInputAgentflow_0-input-humanInputDescription-string", + "display": true + }, { "label": "Model", - "name": "agentModel", + "name": "humanInputModel", "type": "asyncOptions", "loadMethod": "listModels", "loadConfig": true, - "id": "agentAgentflow_1-input-agentModel-asyncOptions", + "show": { + "humanInputDescriptionType": "dynamic" + }, + "id": "humanInputAgentflow_0-input-humanInputModel-asyncOptions", + "display": false + }, + { + "label": "Prompt", + "name": "humanInputModelPrompt", + "type": "string", + "default": "

Summarize the conversation between the user and the assistant, reiterate the last message from the assistant, and ask if user would like to proceed or if they have any feedback.

\n
    \n
  • Begin by capturing the key points of the conversation, ensuring that you reflect the main ideas and themes discussed.
  • \n
  • Then, clearly reproduce the last message sent by the assistant to maintain continuity. Make sure the whole message is reproduced.
  • \n
  • Finally, ask the user if they would like to proceed, or provide any feedback on the last assistant message
  • \n
\n

Output Format The output should be structured in three parts in text:

\n
    \n
  • A summary of the conversation (1-3 sentences).
  • \n
  • The last assistant message (exactly as it appeared).
  • \n
  • Ask the user if they would like to proceed, or provide any feedback on last assistant message. No other explanation and elaboration is needed.
  • \n
\n", + "acceptVariable": true, + "generateInstruction": true, + "rows": 4, + "show": { + "humanInputDescriptionType": "dynamic" + }, + "id": "humanInputAgentflow_0-input-humanInputModelPrompt-string", + "display": false + }, + { + "label": "Enable Feedback", + "name": "humanInputEnableFeedback", + "type": "boolean", + "default": true, + "id": "humanInputAgentflow_0-input-humanInputEnableFeedback-boolean", + "display": true + } + ], + "inputAnchors": [], + "inputs": { + "humanInputDescriptionType": "fixed", + "humanInputEnableFeedback": true, + "humanInputModelConfig": { + "cache": "", + "modelName": "gpt-4o-mini", + "temperature": 0.9, + "streaming": true, + "maxTokens": "", + "topP": "", + "frequencyPenalty": "", + "presencePenalty": "", + "timeout": "", + "strictToolCalling": "", + "stopSequence": "", + "basepath": "", + "proxyUrl": "", + "baseOptions": "", + "allowImageUploads": "", + "imageResolution": "low", + "reasoningEffort": "", + "humanInputModel": "chatOpenAI" + }, + "humanInputDescription": "

Are you sure you want to proceed?

" + }, + "outputAnchors": [ + { + "id": "humanInputAgentflow_0-output-0", + "label": "Human Input", + "name": "humanInputAgentflow" + }, + { + "id": "humanInputAgentflow_0-output-1", + "label": "Human Input", + "name": "humanInputAgentflow" + } + ], + "outputs": { + "humanInputAgentflow": "" + }, + "selected": false + }, + "type": "agentFlow", + "width": 167, + "height": 80, + "selected": false, + "positionAbsolute": { + "x": 156.05666363734434, + "y": 86.62266545493773 + }, + "dragging": false + }, + { + "id": "loopAgentflow_0", + "position": { + "x": 392.1370040831033, + "y": 150.41190827718114 + }, + "data": { + "id": "loopAgentflow_0", + "label": "Loop back to Agent", + "version": 1, + "name": "loopAgentflow", + "type": "Loop", + "color": "#FFA07A", + "hideOutput": true, + "baseClasses": ["Loop"], + "category": "Agent Flows", + "description": "Loop back to a previous node", + "inputParams": [ + { + "label": "Loop Back To", + "name": "loopBackToNode", + "type": "asyncOptions", + "loadMethod": "listPreviousNodes", + "freeSolo": true, + "id": "loopAgentflow_0-input-loopBackToNode-asyncOptions", + "display": true + }, + { + "label": "Max Loop Count", + "name": "maxLoopCount", + "type": "number", + "default": 5, + "id": "loopAgentflow_0-input-maxLoopCount-number", + "display": true + } + ], + "inputAnchors": [], + "inputs": { + "loopBackToNode": "agentAgentflow_0-Email Reply Agent", + "maxLoopCount": 5 + }, + "outputAnchors": [], + "outputs": {}, + "selected": false + }, + "type": "agentFlow", + "width": 198, + "height": 66, + "selected": false, + "positionAbsolute": { + "x": 392.1370040831033, + "y": 150.41190827718114 + }, + "dragging": false + }, + { + "id": "toolAgentflow_0", + "position": { + "x": 607.0106274902857, + "y": 44.74028001269521 + }, + "data": { + "id": "toolAgentflow_0", + "label": "Send Email", + "version": 1.1, + "name": "toolAgentflow", + "type": "Tool", + "color": "#d4a373", + "baseClasses": ["Tool"], + "category": "Agent Flows", + 
"description": "Tools allow LLM to interact with external systems", + "inputParams": [ + { + "label": "Tool", + "name": "toolAgentflowSelectedTool", + "type": "asyncOptions", + "loadMethod": "listTools", + "loadConfig": true, + "id": "toolAgentflow_0-input-toolAgentflowSelectedTool-asyncOptions", + "display": true + }, + { + "label": "Tool Input Arguments", + "name": "toolInputArgs", + "type": "array", + "acceptVariable": true, + "refresh": true, + "array": [ + { + "label": "Input Argument Name", + "name": "inputArgName", + "type": "asyncOptions", + "loadMethod": "listToolInputArgs", + "refresh": true + }, + { + "label": "Input Argument Value", + "name": "inputArgValue", + "type": "string", + "acceptVariable": true + } + ], + "show": { + "toolAgentflowSelectedTool": ".+" + }, + "id": "toolAgentflow_0-input-toolInputArgs-array", + "display": true + }, + { + "label": "Update Flow State", + "name": "toolUpdateState", + "description": "Update runtime state during the execution of the workflow", + "type": "array", + "optional": true, + "acceptVariable": true, + "array": [ + { + "label": "Key", + "name": "key", + "type": "asyncOptions", + "loadMethod": "listRuntimeStateKeys", + "freeSolo": true + }, + { + "label": "Value", + "name": "value", + "type": "string", + "acceptVariable": true, + "acceptNodeOutputAsVariable": true + } + ], + "id": "toolAgentflow_0-input-toolUpdateState-array", + "display": true + } + ], + "inputAnchors": [], + "inputs": { + "toolAgentflowSelectedTool": "gmail", + "toolInputArgs": [ + { + "inputArgName": "to", + "inputArgValue": "

{{ $form.from }}

" + }, + { + "inputArgName": "subject", + "inputArgValue": "

{{ llmAgentflow_0.output.subject }}

" + }, + { + "inputArgName": "body", + "inputArgValue": "

{{ llmAgentflow_0.output.body }}

" + } + ], + "toolUpdateState": "", + "toolAgentflowSelectedToolConfig": { + "gmailType": "messages", + "messageActions": "[\"sendMessage\"]", + "toolAgentflowSelectedTool": "gmail" + }, + "undefined": "" + }, + "outputAnchors": [ + { + "id": "toolAgentflow_0-output-toolAgentflow", + "label": "Tool", + "name": "toolAgentflow" + } + ], + "outputs": {}, + "selected": false + }, + "type": "agentFlow", + "width": 143, + "height": 68, + "selected": false, + "positionAbsolute": { + "x": 607.0106274902857, + "y": 44.74028001269521 + }, + "dragging": false + }, + { + "id": "llmAgentflow_0", + "position": { + "x": 368.9022119252032, + "y": 43.50583396320786 + }, + "data": { + "id": "llmAgentflow_0", + "label": "Email Subject & Body", + "version": 1, + "name": "llmAgentflow", + "type": "LLM", + "color": "#64B5F6", + "baseClasses": ["LLM"], + "category": "Agent Flows", + "description": "Large language models to analyze user-provided inputs and generate responses", + "inputParams": [ + { + "label": "Model", + "name": "llmModel", + "type": "asyncOptions", + "loadMethod": "listModels", + "loadConfig": true, + "id": "llmAgentflow_0-input-llmModel-asyncOptions", "display": true }, { "label": "Messages", - "name": "agentMessages", + "name": "llmMessages", "type": "array", "optional": true, "acceptVariable": true, @@ -1017,120 +960,22 @@ "rows": 4 } ], - "id": "agentAgentflow_1-input-agentMessages-array", - "display": true - }, - { - "label": "Tools", - "name": "agentTools", - "type": "array", - "optional": true, - "array": [ - { - "label": "Tool", - "name": "agentSelectedTool", - "type": "asyncOptions", - "loadMethod": "listTools", - "loadConfig": true - }, - { - "label": "Require Human Input", - "name": "agentSelectedToolRequiresHumanInput", - "type": "boolean", - "optional": true - } - ], - "id": "agentAgentflow_1-input-agentTools-array", - "display": true - }, - { - "label": "Knowledge (Document Stores)", - "name": "agentKnowledgeDocumentStores", - "type": "array", - 
"description": "Give your agent context about different document sources. Document stores must be upserted in advance.", - "array": [ - { - "label": "Document Store", - "name": "documentStore", - "type": "asyncOptions", - "loadMethod": "listStores" - }, - { - "label": "Describe Knowledge", - "name": "docStoreDescription", - "type": "string", - "generateDocStoreDescription": true, - "placeholder": "Describe what the knowledge base is about, this is useful for the AI to know when and how to search for correct information", - "rows": 4 - }, - { - "label": "Return Source Documents", - "name": "returnSourceDocuments", - "type": "boolean", - "optional": true - } - ], - "optional": true, - "id": "agentAgentflow_1-input-agentKnowledgeDocumentStores-array", - "display": true - }, - { - "label": "Knowledge (Vector Embeddings)", - "name": "agentKnowledgeVSEmbeddings", - "type": "array", - "description": "Give your agent context about different document sources from existing vector stores and embeddings", - "array": [ - { - "label": "Vector Store", - "name": "vectorStore", - "type": "asyncOptions", - "loadMethod": "listVectorStores", - "loadConfig": true - }, - { - "label": "Embedding Model", - "name": "embeddingModel", - "type": "asyncOptions", - "loadMethod": "listEmbeddings", - "loadConfig": true - }, - { - "label": "Knowledge Name", - "name": "knowledgeName", - "type": "string", - "placeholder": "A short name for the knowledge base, this is useful for the AI to know when and how to search for correct information" - }, - { - "label": "Describe Knowledge", - "name": "knowledgeDescription", - "type": "string", - "placeholder": "Describe what the knowledge base is about, this is useful for the AI to know when and how to search for correct information", - "rows": 4 - }, - { - "label": "Return Source Documents", - "name": "returnSourceDocuments", - "type": "boolean", - "optional": true - } - ], - "optional": true, - "id": 
"agentAgentflow_1-input-agentKnowledgeVSEmbeddings-array", + "id": "llmAgentflow_0-input-llmMessages-array", "display": true }, { "label": "Enable Memory", - "name": "agentEnableMemory", + "name": "llmEnableMemory", "type": "boolean", "description": "Enable memory for the conversation thread", "default": true, "optional": true, - "id": "agentAgentflow_1-input-agentEnableMemory-boolean", + "id": "llmAgentflow_0-input-llmEnableMemory-boolean", "display": true }, { "label": "Memory Type", - "name": "agentMemoryType", + "name": "llmMemoryType", "type": "options", "options": [ { @@ -1157,52 +1002,52 @@ "optional": true, "default": "allMessages", "show": { - "agentEnableMemory": true + "llmEnableMemory": true }, - "id": "agentAgentflow_1-input-agentMemoryType-options", + "id": "llmAgentflow_0-input-llmMemoryType-options", "display": true }, { "label": "Window Size", - "name": "agentMemoryWindowSize", + "name": "llmMemoryWindowSize", "type": "number", "default": "20", "description": "Uses a fixed window size to surface the last N messages", "show": { - "agentMemoryType": "windowSize" + "llmMemoryType": "windowSize" }, - "id": "agentAgentflow_1-input-agentMemoryWindowSize-number", + "id": "llmAgentflow_0-input-llmMemoryWindowSize-number", "display": false }, { "label": "Max Token Limit", - "name": "agentMemoryMaxTokenLimit", + "name": "llmMemoryMaxTokenLimit", "type": "number", "default": "2000", "description": "Summarize conversations once token limit is reached. 
Default to 2000", "show": { - "agentMemoryType": "conversationSummaryBuffer" + "llmMemoryType": "conversationSummaryBuffer" }, - "id": "agentAgentflow_1-input-agentMemoryMaxTokenLimit-number", + "id": "llmAgentflow_0-input-llmMemoryMaxTokenLimit-number", "display": false }, { "label": "Input Message", - "name": "agentUserMessage", + "name": "llmUserMessage", "type": "string", "description": "Add an input message as user message at the end of the conversation", "rows": 4, "optional": true, "acceptVariable": true, "show": { - "agentEnableMemory": true + "llmEnableMemory": true }, - "id": "agentAgentflow_1-input-agentUserMessage-string", + "id": "llmAgentflow_0-input-llmUserMessage-string", "display": true }, { "label": "Return Response As", - "name": "agentReturnResponseAs", + "name": "llmReturnResponseAs", "type": "options", "options": [ { @@ -1215,12 +1060,88 @@ } ], "default": "userMessage", - "id": "agentAgentflow_1-input-agentReturnResponseAs-options", + "id": "llmAgentflow_0-input-llmReturnResponseAs-options", + "display": true + }, + { + "label": "JSON Structured Output", + "name": "llmStructuredOutput", + "description": "Instruct the LLM to give output in a JSON structured schema", + "type": "array", + "optional": true, + "acceptVariable": true, + "array": [ + { + "label": "Key", + "name": "key", + "type": "string" + }, + { + "label": "Type", + "name": "type", + "type": "options", + "options": [ + { + "label": "String", + "name": "string" + }, + { + "label": "String Array", + "name": "stringArray" + }, + { + "label": "Number", + "name": "number" + }, + { + "label": "Boolean", + "name": "boolean" + }, + { + "label": "Enum", + "name": "enum" + }, + { + "label": "JSON Array", + "name": "jsonArray" + } + ] + }, + { + "label": "Enum Values", + "name": "enumValues", + "type": "string", + "placeholder": "value1, value2, value3", + "description": "Enum values. 
Separated by comma", + "optional": true, + "show": { + "llmStructuredOutput[$index].type": "enum" + } + }, + { + "label": "JSON Schema", + "name": "jsonSchema", + "type": "code", + "placeholder": "{\n \"answer\": {\n \"type\": \"string\",\n \"description\": \"Value of the answer\"\n },\n \"reason\": {\n \"type\": \"string\",\n \"description\": \"Reason for the answer\"\n },\n \"optional\": {\n \"type\": \"boolean\"\n },\n \"count\": {\n \"type\": \"number\"\n },\n \"children\": {\n \"type\": \"array\",\n \"items\": {\n \"type\": \"object\",\n \"properties\": {\n \"value\": {\n \"type\": \"string\",\n \"description\": \"Value of the children's answer\"\n }\n }\n }\n }\n}", + "description": "JSON schema for the structured output", + "optional": true, + "show": { + "llmStructuredOutput[$index].type": "jsonArray" + } + }, + { + "label": "Description", + "name": "description", + "type": "string", + "placeholder": "Description of the key" + } + ], + "id": "llmAgentflow_0-input-llmStructuredOutput-array", "display": true }, { "label": "Update Flow State", - "name": "agentUpdateState", + "name": "llmUpdateState", "description": "Update runtime state during the execution of the workflow", "type": "array", "optional": true, @@ -1241,27 +1162,36 @@ "acceptNodeOutputAsVariable": true } ], - "id": "agentAgentflow_1-input-agentUpdateState-array", + "id": "llmAgentflow_0-input-llmUpdateState-array", "display": true } ], "inputAnchors": [], "inputs": { - "agentModel": "chatOpenAI", - "agentMessages": [ + "llmModel": "chatOpenAI", + "llmMessages": [], + "llmEnableMemory": true, + "llmMemoryType": "allMessages", + "llmUserMessage": "", + "llmReturnResponseAs": "userMessage", + "llmStructuredOutput": [ { - "role": "system", - "content": "

You are a senior financial analyst. You will be provided with the original query and a set of raw search summaries. Your task is to synthesize these into a longโ€‘form markdown report (at least several paragraphs) including a short executive summary and followโ€‘up questions

" + "key": "subject", + "type": "string", + "enumValues": "", + "jsonSchema": "", + "description": "Subject of the email" + }, + { + "key": "body", + "type": "string", + "enumValues": "", + "jsonSchema": "", + "description": "Body of the email" } ], - "agentTools": "", - "agentKnowledgeDocumentStores": "", - "agentEnableMemory": true, - "agentMemoryType": "allMessages", - "agentUserMessage": "

Original query: {{ $form.query }}

Summarized search results: {{ iterationAgentflow_0 }}

", - "agentReturnResponseAs": "userMessage", - "agentUpdateState": "", - "agentModelConfig": { + "llmUpdateState": "", + "llmModelConfig": { "cache": "", "modelName": "gpt-4o-mini", "temperature": 0.9, @@ -1278,183 +1208,27 @@ "baseOptions": "", "allowImageUploads": "", "imageResolution": "low", - "reasoningEffort": "medium", - "agentModel": "chatOpenAI" + "reasoningEffort": "", + "llmModel": "chatOpenAI" } }, "outputAnchors": [ { - "id": "agentAgentflow_1-output-agentAgentflow", - "label": "Agent", - "name": "agentAgentflow" + "id": "llmAgentflow_0-output-llmAgentflow", + "label": "LLM", + "name": "llmAgentflow" } ], "outputs": {}, "selected": false }, "type": "agentFlow", - "width": 168, - "height": 71, + "width": 209, + "height": 72, "selected": false, "positionAbsolute": { - "x": 461.76351005035474, - "y": 81.71183989476083 - }, - "dragging": false - }, - { - "id": "stickyNoteAgentflow_0", - "position": { - "x": 214.77714507955716, - "y": -165.2444952661696 - }, - "data": { - "id": "stickyNoteAgentflow_0", - "label": "Sticky Note", - "version": 1, - "name": "stickyNoteAgentflow", - "type": "StickyNote", - "color": "#fee440", - "baseClasses": ["StickyNote"], - "category": "Agent Flows", - "description": "Add notes to the agent flow", - "inputParams": [ - { - "label": "", - "name": "note", - "type": "string", - "rows": 1, - "placeholder": "Type something here", - "optional": true, - "id": "stickyNoteAgentflow_0-input-note-string", - "display": true - } - ], - "inputAnchors": [], - "inputs": { - "note": "Search Agent will iterate through the search terms and search the web using tool" - }, - "outputAnchors": [ - { - "id": "stickyNoteAgentflow_0-output-stickyNoteAgentflow", - "label": "Sticky Note", - "name": "stickyNoteAgentflow" - } - ], - "outputs": {}, - "selected": false - }, - "type": "stickyNote", - "width": 189, - "height": 142, - "selected": false, - "positionAbsolute": { - "x": 214.77714507955716, - "y": -165.2444952661696 - }, - "dragging": false - }, - 
{ - "id": "stickyNoteAgentflow_1", - "position": { - "x": -100.05436009717414, - "y": -45.56902388417101 - }, - "data": { - "id": "stickyNoteAgentflow_1", - "label": "Sticky Note (1)", - "version": 1, - "name": "stickyNoteAgentflow", - "type": "StickyNote", - "color": "#fee440", - "baseClasses": ["StickyNote"], - "category": "Agent Flows", - "description": "Add notes to the agent flow", - "inputParams": [ - { - "label": "", - "name": "note", - "type": "string", - "rows": 1, - "placeholder": "Type something here", - "optional": true, - "id": "stickyNoteAgentflow_1-input-note-string", - "display": true - } - ], - "inputAnchors": [], - "inputs": { - "note": "Planner will generate list of search terms to query for" - }, - "outputAnchors": [ - { - "id": "stickyNoteAgentflow_1-output-stickyNoteAgentflow", - "label": "Sticky Note", - "name": "stickyNoteAgentflow" - } - ], - "outputs": {}, - "selected": false - }, - "type": "stickyNote", - "width": 189, - "height": 101, - "selected": false, - "positionAbsolute": { - "x": -100.05436009717414, - "y": -45.56902388417101 - }, - "dragging": false - }, - { - "id": "stickyNoteAgentflow_2", - "position": { - "x": 457.98399139175314, - "y": -35.19227767879839 - }, - "data": { - "id": "stickyNoteAgentflow_2", - "label": "Sticky Note (2)", - "version": 1, - "name": "stickyNoteAgentflow", - "type": "StickyNote", - "color": "#fee440", - "baseClasses": ["StickyNote"], - "category": "Agent Flows", - "description": "Add notes to the agent flow", - "inputParams": [ - { - "label": "", - "name": "note", - "type": "string", - "rows": 1, - "placeholder": "Type something here", - "optional": true, - "id": "stickyNoteAgentflow_2-input-note-string", - "display": true - } - ], - "inputAnchors": [], - "inputs": { - "note": "Generate the final report from the search results" - }, - "outputAnchors": [ - { - "id": "stickyNoteAgentflow_2-output-stickyNoteAgentflow", - "label": "Sticky Note", - "name": "stickyNoteAgentflow" - } - ], - "outputs": {}, - 
"selected": false - }, - "type": "stickyNote", - "width": 189, - "height": 101, - "selected": false, - "positionAbsolute": { - "x": 457.98399139175314, - "y": -35.19227767879839 + "x": 368.9022119252032, + "y": 43.50583396320786 }, "dragging": false } @@ -1463,41 +1237,69 @@ { "source": "startAgentflow_0", "sourceHandle": "startAgentflow_0-output-startAgentflow", - "target": "llmAgentflow_0", - "targetHandle": "llmAgentflow_0", + "target": "agentAgentflow_0", + "targetHandle": "agentAgentflow_0", "data": { "sourceColor": "#7EE787", - "targetColor": "#64B5F6", - "isHumanInput": false - }, - "type": "agentFlow", - "id": "startAgentflow_0-startAgentflow_0-output-startAgentflow-llmAgentflow_0-llmAgentflow_0" - }, - { - "source": "llmAgentflow_0", - "sourceHandle": "llmAgentflow_0-output-llmAgentflow", - "target": "iterationAgentflow_0", - "targetHandle": "iterationAgentflow_0", - "data": { - "sourceColor": "#64B5F6", - "targetColor": "#9C89B8", - "isHumanInput": false - }, - "type": "agentFlow", - "id": "llmAgentflow_0-llmAgentflow_0-output-llmAgentflow-iterationAgentflow_0-iterationAgentflow_0" - }, - { - "source": "iterationAgentflow_0", - "sourceHandle": "iterationAgentflow_0-output-iterationAgentflow", - "target": "agentAgentflow_1", - "targetHandle": "agentAgentflow_1", - "data": { - "sourceColor": "#9C89B8", "targetColor": "#4DD0E1", "isHumanInput": false }, "type": "agentFlow", - "id": "iterationAgentflow_0-iterationAgentflow_0-output-iterationAgentflow-agentAgentflow_1-agentAgentflow_1" + "id": "startAgentflow_0-startAgentflow_0-output-startAgentflow-agentAgentflow_0-agentAgentflow_0" + }, + { + "source": "agentAgentflow_0", + "sourceHandle": "agentAgentflow_0-output-agentAgentflow", + "target": "humanInputAgentflow_0", + "targetHandle": "humanInputAgentflow_0", + "data": { + "sourceColor": "#4DD0E1", + "targetColor": "#6E6EFD", + "isHumanInput": false + }, + "type": "agentFlow", + "id": 
"agentAgentflow_0-agentAgentflow_0-output-agentAgentflow-humanInputAgentflow_0-humanInputAgentflow_0" + }, + { + "source": "humanInputAgentflow_0", + "sourceHandle": "humanInputAgentflow_0-output-1", + "target": "loopAgentflow_0", + "targetHandle": "loopAgentflow_0", + "data": { + "sourceColor": "#6E6EFD", + "targetColor": "#FFA07A", + "edgeLabel": "reject", + "isHumanInput": true + }, + "type": "agentFlow", + "id": "humanInputAgentflow_0-humanInputAgentflow_0-output-1-loopAgentflow_0-loopAgentflow_0" + }, + { + "source": "humanInputAgentflow_0", + "sourceHandle": "humanInputAgentflow_0-output-0", + "target": "llmAgentflow_0", + "targetHandle": "llmAgentflow_0", + "data": { + "sourceColor": "#6E6EFD", + "targetColor": "#64B5F6", + "edgeLabel": "proceed", + "isHumanInput": true + }, + "type": "agentFlow", + "id": "humanInputAgentflow_0-humanInputAgentflow_0-output-0-llmAgentflow_0-llmAgentflow_0" + }, + { + "source": "llmAgentflow_0", + "sourceHandle": "llmAgentflow_0-output-llmAgentflow", + "target": "toolAgentflow_0", + "targetHandle": "toolAgentflow_0", + "data": { + "sourceColor": "#64B5F6", + "targetColor": "#d4a373", + "isHumanInput": false + }, + "type": "agentFlow", + "id": "llmAgentflow_0-llmAgentflow_0-output-llmAgentflow-toolAgentflow_0-toolAgentflow_0" } ] } diff --git a/packages/server/marketplaces/agentflowsv2/Interacting With API.json b/packages/server/marketplaces/agentflowsv2/Interacting With API.json new file mode 100644 index 000000000..6d505f5be --- /dev/null +++ b/packages/server/marketplaces/agentflowsv2/Interacting With API.json @@ -0,0 +1,1294 @@ +{ + "description": "Different ways of agents that can interact with APIs", + "usecases": ["Interacting with API"], + "nodes": [ + { + "id": "startAgentflow_0", + "type": "agentFlow", + "position": { + "x": 122, + "y": 46.5 + }, + "data": { + "id": "startAgentflow_0", + "label": "Start", + "version": 1.1, + "name": "startAgentflow", + "type": "Start", + "color": "#7EE787", + "hideInput": true, + 
"baseClasses": ["Start"], + "category": "Agent Flows", + "description": "Starting point of the agentflow", + "inputParams": [ + { + "label": "Input Type", + "name": "startInputType", + "type": "options", + "options": [ + { + "label": "Chat Input", + "name": "chatInput", + "description": "Start the conversation with chat input" + }, + { + "label": "Form Input", + "name": "formInput", + "description": "Start the workflow with form inputs" + } + ], + "default": "chatInput", + "id": "startAgentflow_0-input-startInputType-options", + "display": true + }, + { + "label": "Form Title", + "name": "formTitle", + "type": "string", + "placeholder": "Please Fill Out The Form", + "show": { + "startInputType": "formInput" + }, + "id": "startAgentflow_0-input-formTitle-string", + "display": false + }, + { + "label": "Form Description", + "name": "formDescription", + "type": "string", + "placeholder": "Complete all fields below to continue", + "show": { + "startInputType": "formInput" + }, + "id": "startAgentflow_0-input-formDescription-string", + "display": false + }, + { + "label": "Form Input Types", + "name": "formInputTypes", + "description": "Specify the type of form input", + "type": "array", + "show": { + "startInputType": "formInput" + }, + "array": [ + { + "label": "Type", + "name": "type", + "type": "options", + "options": [ + { + "label": "String", + "name": "string" + }, + { + "label": "Number", + "name": "number" + }, + { + "label": "Boolean", + "name": "boolean" + }, + { + "label": "Options", + "name": "options" + } + ], + "default": "string" + }, + { + "label": "Label", + "name": "label", + "type": "string", + "placeholder": "Label for the input" + }, + { + "label": "Variable Name", + "name": "name", + "type": "string", + "placeholder": "Variable name for the input (must be camel case)", + "description": "Variable name must be camel case. For example: firstName, lastName, etc." 
+ }, + { + "label": "Add Options", + "name": "addOptions", + "type": "array", + "show": { + "formInputTypes[$index].type": "options" + }, + "array": [ + { + "label": "Option", + "name": "option", + "type": "string" + } + ] + } + ], + "id": "startAgentflow_0-input-formInputTypes-array", + "display": false + }, + { + "label": "Ephemeral Memory", + "name": "startEphemeralMemory", + "type": "boolean", + "description": "Start fresh for every execution without past chat history", + "optional": true, + "id": "startAgentflow_0-input-startEphemeralMemory-boolean", + "display": true + }, + { + "label": "Flow State", + "name": "startState", + "description": "Runtime state during the execution of the workflow", + "type": "array", + "optional": true, + "array": [ + { + "label": "Key", + "name": "key", + "type": "string", + "placeholder": "Foo" + }, + { + "label": "Value", + "name": "value", + "type": "string", + "placeholder": "Bar", + "optional": true + } + ], + "id": "startAgentflow_0-input-startState-array", + "display": true + }, + { + "label": "Persist State", + "name": "startPersistState", + "type": "boolean", + "description": "Persist the state in the same session", + "optional": true, + "id": "startAgentflow_0-input-startPersistState-boolean", + "display": true + } + ], + "inputAnchors": [], + "inputs": { + "startInputType": "chatInput", + "formTitle": "", + "formDescription": "", + "formInputTypes": "", + "startEphemeralMemory": "", + "startState": "", + "startPersistState": "" + }, + "outputAnchors": [ + { + "id": "startAgentflow_0-output-startAgentflow", + "label": "Start", + "name": "startAgentflow" + } + ], + "outputs": {}, + "selected": false + }, + "width": 103, + "height": 66, + "positionAbsolute": { + "x": 122, + "y": 46.5 + }, + "selected": false, + "dragging": false + }, + { + "id": "agentAgentflow_0", + "position": { + "x": 276.5, + "y": 30 + }, + "data": { + "id": "agentAgentflow_0", + "label": "Requests Agent", + "version": 1, + "name": "agentAgentflow", + 
"type": "Agent", + "color": "#4DD0E1", + "baseClasses": ["Agent"], + "category": "Agent Flows", + "description": "Dynamically choose and utilize tools during runtime, enabling multi-step reasoning", + "inputParams": [ + { + "label": "Model", + "name": "agentModel", + "type": "asyncOptions", + "loadMethod": "listModels", + "loadConfig": true, + "id": "agentAgentflow_0-input-agentModel-asyncOptions", + "display": true + }, + { + "label": "Messages", + "name": "agentMessages", + "type": "array", + "optional": true, + "acceptVariable": true, + "array": [ + { + "label": "Role", + "name": "role", + "type": "options", + "options": [ + { + "label": "System", + "name": "system" + }, + { + "label": "Assistant", + "name": "assistant" + }, + { + "label": "Developer", + "name": "developer" + }, + { + "label": "User", + "name": "user" + } + ] + }, + { + "label": "Content", + "name": "content", + "type": "string", + "acceptVariable": true, + "generateInstruction": true, + "rows": 4 + } + ], + "id": "agentAgentflow_0-input-agentMessages-array", + "display": true + }, + { + "label": "Tools", + "name": "agentTools", + "type": "array", + "optional": true, + "array": [ + { + "label": "Tool", + "name": "agentSelectedTool", + "type": "asyncOptions", + "loadMethod": "listTools", + "loadConfig": true + }, + { + "label": "Require Human Input", + "name": "agentSelectedToolRequiresHumanInput", + "type": "boolean", + "optional": true + } + ], + "id": "agentAgentflow_0-input-agentTools-array", + "display": true + }, + { + "label": "Knowledge (Document Stores)", + "name": "agentKnowledgeDocumentStores", + "type": "array", + "description": "Give your agent context about different document sources. 
Document stores must be upserted in advance.", + "array": [ + { + "label": "Document Store", + "name": "documentStore", + "type": "asyncOptions", + "loadMethod": "listStores" + }, + { + "label": "Describe Knowledge", + "name": "docStoreDescription", + "type": "string", + "generateDocStoreDescription": true, + "placeholder": "Describe what the knowledge base is about, this is useful for the AI to know when and how to search for correct information", + "rows": 4 + }, + { + "label": "Return Source Documents", + "name": "returnSourceDocuments", + "type": "boolean", + "optional": true + } + ], + "optional": true, + "id": "agentAgentflow_0-input-agentKnowledgeDocumentStores-array", + "display": true + }, + { + "label": "Knowledge (Vector Embeddings)", + "name": "agentKnowledgeVSEmbeddings", + "type": "array", + "description": "Give your agent context about different document sources from existing vector stores and embeddings", + "array": [ + { + "label": "Vector Store", + "name": "vectorStore", + "type": "asyncOptions", + "loadMethod": "listVectorStores", + "loadConfig": true + }, + { + "label": "Embedding Model", + "name": "embeddingModel", + "type": "asyncOptions", + "loadMethod": "listEmbeddings", + "loadConfig": true + }, + { + "label": "Knowledge Name", + "name": "knowledgeName", + "type": "string", + "placeholder": "A short name for the knowledge base, this is useful for the AI to know when and how to search for correct information" + }, + { + "label": "Describe Knowledge", + "name": "knowledgeDescription", + "type": "string", + "placeholder": "Describe what the knowledge base is about, this is useful for the AI to know when and how to search for correct information", + "rows": 4 + }, + { + "label": "Return Source Documents", + "name": "returnSourceDocuments", + "type": "boolean", + "optional": true + } + ], + "optional": true, + "id": "agentAgentflow_0-input-agentKnowledgeVSEmbeddings-array", + "display": true + }, + { + "label": "Enable Memory", + "name": 
"agentEnableMemory", + "type": "boolean", + "description": "Enable memory for the conversation thread", + "default": true, + "optional": true, + "id": "agentAgentflow_0-input-agentEnableMemory-boolean", + "display": true + }, + { + "label": "Memory Type", + "name": "agentMemoryType", + "type": "options", + "options": [ + { + "label": "All Messages", + "name": "allMessages", + "description": "Retrieve all messages from the conversation" + }, + { + "label": "Window Size", + "name": "windowSize", + "description": "Uses a fixed window size to surface the last N messages" + }, + { + "label": "Conversation Summary", + "name": "conversationSummary", + "description": "Summarizes the whole conversation" + }, + { + "label": "Conversation Summary Buffer", + "name": "conversationSummaryBuffer", + "description": "Summarize conversations once token limit is reached. Default to 2000" + } + ], + "optional": true, + "default": "allMessages", + "show": { + "agentEnableMemory": true + }, + "id": "agentAgentflow_0-input-agentMemoryType-options", + "display": true + }, + { + "label": "Window Size", + "name": "agentMemoryWindowSize", + "type": "number", + "default": "20", + "description": "Uses a fixed window size to surface the last N messages", + "show": { + "agentMemoryType": "windowSize" + }, + "id": "agentAgentflow_0-input-agentMemoryWindowSize-number", + "display": false + }, + { + "label": "Max Token Limit", + "name": "agentMemoryMaxTokenLimit", + "type": "number", + "default": "2000", + "description": "Summarize conversations once token limit is reached. 
Default to 2000", + "show": { + "agentMemoryType": "conversationSummaryBuffer" + }, + "id": "agentAgentflow_0-input-agentMemoryMaxTokenLimit-number", + "display": false + }, + { + "label": "Input Message", + "name": "agentUserMessage", + "type": "string", + "description": "Add an input message as user message at the end of the conversation", + "rows": 4, + "optional": true, + "acceptVariable": true, + "show": { + "agentEnableMemory": true + }, + "id": "agentAgentflow_0-input-agentUserMessage-string", + "display": true + }, + { + "label": "Return Response As", + "name": "agentReturnResponseAs", + "type": "options", + "options": [ + { + "label": "User Message", + "name": "userMessage" + }, + { + "label": "Assistant Message", + "name": "assistantMessage" + } + ], + "default": "userMessage", + "id": "agentAgentflow_0-input-agentReturnResponseAs-options", + "display": true + }, + { + "label": "Update Flow State", + "name": "agentUpdateState", + "description": "Update runtime state during the execution of the workflow", + "type": "array", + "optional": true, + "acceptVariable": true, + "array": [ + { + "label": "Key", + "name": "key", + "type": "asyncOptions", + "loadMethod": "listRuntimeStateKeys", + "freeSolo": true + }, + { + "label": "Value", + "name": "value", + "type": "string", + "acceptVariable": true, + "acceptNodeOutputAsVariable": true + } + ], + "id": "agentAgentflow_0-input-agentUpdateState-array", + "display": true + } + ], + "inputAnchors": [], + "inputs": { + "agentModel": "chatOpenAI", + "agentMessages": [ + { + "role": "system", + "content": "

You are a helpful assistant.

Today's date time is {{ current_date_time }}

" + } + ], + "agentTools": [ + { + "agentSelectedTool": "requestsGet", + "agentSelectedToolRequiresHumanInput": "", + "agentSelectedToolConfig": { + "requestsGetUrl": "

http://localhost:5566/events

", + "requestsGetName": "get_events", + "requestsGetDescription": "Use this when you need to get events", + "requestsGetHeaders": "", + "requestsGetQueryParamsSchema": "{\n \"id\": {\n \"type\": \"string\",\n \"in\": \"path\",\n \"description\": \"ID of the item to get. /:id\"\n },\n \"limit\": {\n \"type\": \"string\",\n \"in\": \"query\",\n \"description\": \"Limit the number of items to get. ?limit=10\"\n }\n}", + "requestsGetMaxOutputLength": "2000", + "agentSelectedTool": "requestsGet" + } + }, + { + "agentSelectedTool": "requestsPost", + "agentSelectedToolRequiresHumanInput": "", + "agentSelectedToolConfig": { + "requestsPostUrl": "

http://localhost:5566/events

", + "requestsPostName": "create_event", + "requestsPostDescription": "Use this when you want to create a new event", + "requestsPostHeaders": "", + "requestPostBody": "", + "requestsPostBodySchema": "{\n \"name\": {\n \"type\": \"string\",\n \"required\": true,\n \"description\": \"Name of the event\"\n },\n \"date\": {\n \"type\": \"string\",\n \"required\": true,\n \"description\": \"Date of the event\"\n },\n \"location\": {\n \"type\": \"string\",\n \"required\": true,\n \"description\": \"Location of the event\"\n }\n}", + "requestsPostMaxOutputLength": "2000", + "agentSelectedTool": "requestsPost" + } + }, + { + "agentSelectedTool": "requestsPut", + "agentSelectedToolRequiresHumanInput": "", + "agentSelectedToolConfig": { + "requestsPutUrl": "

http://localhost:5566/events

", + "requestsPutName": "update_event", + "requestsPutDescription": "Use this when you want to update an event", + "requestsPutHeaders": "", + "requestPutBody": "", + "requestsPutBodySchema": "{\n \"name\": {\n \"type\": \"string\",\n \"required\": true,\n \"description\": \"Name of the event\"\n },\n \"date\": {\n \"type\": \"string\",\n \"required\": true,\n \"description\": \"Date of the event\"\n },\n \"location\": {\n \"type\": \"string\",\n \"required\": true,\n \"description\": \"Location of the event\"\n }\n}", + "requestsPutMaxOutputLength": "2000", + "agentSelectedTool": "requestsPut" + } + }, + { + "agentSelectedTool": "requestsDelete", + "agentSelectedToolRequiresHumanInput": "", + "agentSelectedToolConfig": { + "requestsDeleteUrl": "

http://localhost:5566/events

", + "requestsDeleteName": "delete_event", + "requestsDeleteDescription": "Use this when you need to delete event", + "requestsDeleteHeaders": "", + "requestsDeleteQueryParamsSchema": "{\n \"id\": {\n \"type\": \"string\",\n \"required\": true,\n \"in\": \"path\",\n \"description\": \"ID of the item to delete. /:id\"\n }\n}", + "requestsDeleteMaxOutputLength": "2000", + "agentSelectedTool": "requestsDelete" + } + } + ], + "agentKnowledgeDocumentStores": [], + "agentKnowledgeVSEmbeddings": "", + "agentEnableMemory": true, + "agentMemoryType": "allMessages", + "agentUserMessage": "", + "agentReturnResponseAs": "userMessage", + "agentUpdateState": "", + "agentModelConfig": { + "credential": "", + "modelName": "gpt-4o-mini", + "temperature": 0.9, + "streaming": true, + "maxTokens": "", + "topP": "", + "frequencyPenalty": "", + "presencePenalty": "", + "timeout": "", + "strictToolCalling": "", + "stopSequence": "", + "basepath": "", + "proxyUrl": "", + "baseOptions": "", + "allowImageUploads": "", + "imageResolution": "low", + "reasoningEffort": "", + "agentModel": "chatOpenAI" + } + }, + "outputAnchors": [ + { + "id": "agentAgentflow_0-output-agentAgentflow", + "label": "Agent", + "name": "agentAgentflow" + } + ], + "outputs": {}, + "selected": false + }, + "type": "agentFlow", + "width": 176, + "height": 100, + "selected": false, + "positionAbsolute": { + "x": 276.5, + "y": 30 + }, + "dragging": false + }, + { + "id": "agentAgentflow_1", + "position": { + "x": 486.5, + "y": 30.25 + }, + "data": { + "id": "agentAgentflow_1", + "label": "OpenAPI Agent", + "version": 1, + "name": "agentAgentflow", + "type": "Agent", + "color": "#4DD0E1", + "baseClasses": ["Agent"], + "category": "Agent Flows", + "description": "Dynamically choose and utilize tools during runtime, enabling multi-step reasoning", + "inputParams": [ + { + "label": "Model", + "name": "agentModel", + "type": "asyncOptions", + "loadMethod": "listModels", + "loadConfig": true, + "id": 
"agentAgentflow_1-input-agentModel-asyncOptions", + "display": true + }, + { + "label": "Messages", + "name": "agentMessages", + "type": "array", + "optional": true, + "acceptVariable": true, + "array": [ + { + "label": "Role", + "name": "role", + "type": "options", + "options": [ + { + "label": "System", + "name": "system" + }, + { + "label": "Assistant", + "name": "assistant" + }, + { + "label": "Developer", + "name": "developer" + }, + { + "label": "User", + "name": "user" + } + ] + }, + { + "label": "Content", + "name": "content", + "type": "string", + "acceptVariable": true, + "generateInstruction": true, + "rows": 4 + } + ], + "id": "agentAgentflow_1-input-agentMessages-array", + "display": true + }, + { + "label": "Tools", + "name": "agentTools", + "type": "array", + "optional": true, + "array": [ + { + "label": "Tool", + "name": "agentSelectedTool", + "type": "asyncOptions", + "loadMethod": "listTools", + "loadConfig": true + }, + { + "label": "Require Human Input", + "name": "agentSelectedToolRequiresHumanInput", + "type": "boolean", + "optional": true + } + ], + "id": "agentAgentflow_1-input-agentTools-array", + "display": true + }, + { + "label": "Knowledge (Document Stores)", + "name": "agentKnowledgeDocumentStores", + "type": "array", + "description": "Give your agent context about different document sources. 
Document stores must be upserted in advance.", + "array": [ + { + "label": "Document Store", + "name": "documentStore", + "type": "asyncOptions", + "loadMethod": "listStores" + }, + { + "label": "Describe Knowledge", + "name": "docStoreDescription", + "type": "string", + "generateDocStoreDescription": true, + "placeholder": "Describe what the knowledge base is about, this is useful for the AI to know when and how to search for correct information", + "rows": 4 + }, + { + "label": "Return Source Documents", + "name": "returnSourceDocuments", + "type": "boolean", + "optional": true + } + ], + "optional": true, + "id": "agentAgentflow_1-input-agentKnowledgeDocumentStores-array", + "display": true + }, + { + "label": "Knowledge (Vector Embeddings)", + "name": "agentKnowledgeVSEmbeddings", + "type": "array", + "description": "Give your agent context about different document sources from existing vector stores and embeddings", + "array": [ + { + "label": "Vector Store", + "name": "vectorStore", + "type": "asyncOptions", + "loadMethod": "listVectorStores", + "loadConfig": true + }, + { + "label": "Embedding Model", + "name": "embeddingModel", + "type": "asyncOptions", + "loadMethod": "listEmbeddings", + "loadConfig": true + }, + { + "label": "Knowledge Name", + "name": "knowledgeName", + "type": "string", + "placeholder": "A short name for the knowledge base, this is useful for the AI to know when and how to search for correct information" + }, + { + "label": "Describe Knowledge", + "name": "knowledgeDescription", + "type": "string", + "placeholder": "Describe what the knowledge base is about, this is useful for the AI to know when and how to search for correct information", + "rows": 4 + }, + { + "label": "Return Source Documents", + "name": "returnSourceDocuments", + "type": "boolean", + "optional": true + } + ], + "optional": true, + "id": "agentAgentflow_1-input-agentKnowledgeVSEmbeddings-array", + "display": true + }, + { + "label": "Enable Memory", + "name": 
"agentEnableMemory", + "type": "boolean", + "description": "Enable memory for the conversation thread", + "default": true, + "optional": true, + "id": "agentAgentflow_1-input-agentEnableMemory-boolean", + "display": true + }, + { + "label": "Memory Type", + "name": "agentMemoryType", + "type": "options", + "options": [ + { + "label": "All Messages", + "name": "allMessages", + "description": "Retrieve all messages from the conversation" + }, + { + "label": "Window Size", + "name": "windowSize", + "description": "Uses a fixed window size to surface the last N messages" + }, + { + "label": "Conversation Summary", + "name": "conversationSummary", + "description": "Summarizes the whole conversation" + }, + { + "label": "Conversation Summary Buffer", + "name": "conversationSummaryBuffer", + "description": "Summarize conversations once token limit is reached. Default to 2000" + } + ], + "optional": true, + "default": "allMessages", + "show": { + "agentEnableMemory": true + }, + "id": "agentAgentflow_1-input-agentMemoryType-options", + "display": true + }, + { + "label": "Window Size", + "name": "agentMemoryWindowSize", + "type": "number", + "default": "20", + "description": "Uses a fixed window size to surface the last N messages", + "show": { + "agentMemoryType": "windowSize" + }, + "id": "agentAgentflow_1-input-agentMemoryWindowSize-number", + "display": false + }, + { + "label": "Max Token Limit", + "name": "agentMemoryMaxTokenLimit", + "type": "number", + "default": "2000", + "description": "Summarize conversations once token limit is reached. 
Default to 2000", + "show": { + "agentMemoryType": "conversationSummaryBuffer" + }, + "id": "agentAgentflow_1-input-agentMemoryMaxTokenLimit-number", + "display": false + }, + { + "label": "Input Message", + "name": "agentUserMessage", + "type": "string", + "description": "Add an input message as user message at the end of the conversation", + "rows": 4, + "optional": true, + "acceptVariable": true, + "show": { + "agentEnableMemory": true + }, + "id": "agentAgentflow_1-input-agentUserMessage-string", + "display": true + }, + { + "label": "Return Response As", + "name": "agentReturnResponseAs", + "type": "options", + "options": [ + { + "label": "User Message", + "name": "userMessage" + }, + { + "label": "Assistant Message", + "name": "assistantMessage" + } + ], + "default": "userMessage", + "id": "agentAgentflow_1-input-agentReturnResponseAs-options", + "display": true + }, + { + "label": "Update Flow State", + "name": "agentUpdateState", + "description": "Update runtime state during the execution of the workflow", + "type": "array", + "optional": true, + "acceptVariable": true, + "array": [ + { + "label": "Key", + "name": "key", + "type": "asyncOptions", + "loadMethod": "listRuntimeStateKeys", + "freeSolo": true + }, + { + "label": "Value", + "name": "value", + "type": "string", + "acceptVariable": true, + "acceptNodeOutputAsVariable": true + } + ], + "id": "agentAgentflow_1-input-agentUpdateState-array", + "display": true + } + ], + "inputAnchors": [], + "inputs": { + "agentModel": "chatAnthropic", + "agentMessages": [ + { + "role": "system", + "content": "

You are a helpful assistant.

Today's date time is {{ current_date_time }}

" + } + ], + "agentTools": [ + { + "agentSelectedTool": "openAPIToolkit", + "agentSelectedToolRequiresHumanInput": "", + "agentSelectedToolConfig": { + "yamlFile": "", + "returnDirect": "", + "headers": "", + "removeNulls": "", + "customCode": "const fetch = require('node-fetch');\nconst url = $url;\nconst options = $options;\n\ntry {\n\tconst response = await fetch(url, options);\n\tconst resp = await response.json();\n\treturn JSON.stringify(resp);\n} catch (error) {\n\tconsole.error(error);\n\treturn '';\n}\n", + "agentSelectedTool": "openAPIToolkit" + } + } + ], + "agentKnowledgeDocumentStores": "", + "agentKnowledgeVSEmbeddings": "", + "agentEnableMemory": true, + "agentMemoryType": "allMessages", + "agentUserMessage": "", + "agentReturnResponseAs": "userMessage", + "agentUpdateState": "", + "agentModelConfig": { + "cache": "", + "modelName": "claude-sonnet-4-0", + "temperature": 0.9, + "streaming": true, + "maxTokensToSample": "", + "topP": "", + "topK": "", + "extendedThinking": "", + "budgetTokens": 1024, + "allowImageUploads": "", + "agentModel": "chatAnthropic" + } + }, + "outputAnchors": [ + { + "id": "agentAgentflow_1-output-agentAgentflow", + "label": "Agent", + "name": "agentAgentflow" + } + ], + "outputs": {}, + "selected": false + }, + "type": "agentFlow", + "width": 213, + "height": 100, + "selected": false, + "positionAbsolute": { + "x": 486.5, + "y": 30.25 + }, + "dragging": false + }, + { + "id": "stickyNoteAgentflow_0", + "position": { + "x": 359.646787967208, + "y": -168.84288303219904 + }, + "data": { + "id": "stickyNoteAgentflow_0", + "label": "Sticky Note", + "version": 1, + "name": "stickyNoteAgentflow", + "type": "StickyNote", + "color": "#fee440", + "baseClasses": ["StickyNote"], + "category": "Agent Flows", + "description": "Add notes to the agent flow", + "inputParams": [ + { + "label": "", + "name": "note", + "type": "string", + "rows": 1, + "placeholder": "Type something here", + "optional": true, + "id": 
"stickyNoteAgentflow_0-input-note-string", + "display": true + } + ], + "inputAnchors": [], + "inputs": { + "note": "There are two ways of interacting with API\n\n- Request GET, PUT, POST, DELETE tools\n\n- OpenAPI Toolkit" + }, + "outputAnchors": [ + { + "id": "stickyNoteAgentflow_0-output-stickyNoteAgentflow", + "label": "Sticky Note", + "name": "stickyNoteAgentflow" + } + ], + "outputs": {}, + "selected": false + }, + "type": "stickyNote", + "width": 210, + "height": 183, + "selected": false, + "positionAbsolute": { + "x": 359.646787967208, + "y": -168.84288303219904 + }, + "dragging": false + }, + { + "id": "httpAgentflow_0", + "position": { + "x": 738.2972542041965, + "y": 46.68491774985176 + }, + "data": { + "id": "httpAgentflow_0", + "label": "Send HTTP Request", + "version": 1.1, + "name": "httpAgentflow", + "type": "HTTP", + "color": "#FF7F7F", + "baseClasses": ["HTTP"], + "category": "Agent Flows", + "description": "Send a HTTP request", + "inputParams": [ + { + "label": "HTTP Credential", + "name": "credential", + "type": "credential", + "credentialNames": ["httpBasicAuth", "httpBearerToken", "httpApiKey"], + "optional": true, + "id": "httpAgentflow_0-input-credential-credential", + "display": true + }, + { + "label": "Method", + "name": "method", + "type": "options", + "options": [ + { + "label": "GET", + "name": "GET" + }, + { + "label": "POST", + "name": "POST" + }, + { + "label": "PUT", + "name": "PUT" + }, + { + "label": "DELETE", + "name": "DELETE" + }, + { + "label": "PATCH", + "name": "PATCH" + } + ], + "default": "GET", + "id": "httpAgentflow_0-input-method-options", + "display": true + }, + { + "label": "URL", + "name": "url", + "type": "string", + "id": "httpAgentflow_0-input-url-string", + "display": true + }, + { + "label": "Headers", + "name": "headers", + "type": "array", + "acceptVariable": true, + "array": [ + { + "label": "Key", + "name": "key", + "type": "string", + "default": "" + }, + { + "label": "Value", + "name": "value", + 
"type": "string", + "default": "", + "acceptVariable": true + } + ], + "optional": true, + "id": "httpAgentflow_0-input-headers-array", + "display": true + }, + { + "label": "Query Params", + "name": "queryParams", + "type": "array", + "acceptVariable": true, + "array": [ + { + "label": "Key", + "name": "key", + "type": "string", + "default": "" + }, + { + "label": "Value", + "name": "value", + "type": "string", + "default": "", + "acceptVariable": true + } + ], + "optional": true, + "id": "httpAgentflow_0-input-queryParams-array", + "display": true + }, + { + "label": "Body Type", + "name": "bodyType", + "type": "options", + "options": [ + { + "label": "JSON", + "name": "json" + }, + { + "label": "Raw", + "name": "raw" + }, + { + "label": "Form Data", + "name": "formData" + }, + { + "label": "x-www-form-urlencoded", + "name": "xWwwFormUrlencoded" + } + ], + "optional": true, + "id": "httpAgentflow_0-input-bodyType-options", + "display": true + }, + { + "label": "Body", + "name": "body", + "type": "string", + "acceptVariable": true, + "rows": 4, + "show": { + "bodyType": ["raw", "json"] + }, + "optional": true, + "id": "httpAgentflow_0-input-body-string", + "display": false + }, + { + "label": "Body", + "name": "body", + "type": "array", + "acceptVariable": true, + "show": { + "bodyType": ["xWwwFormUrlencoded", "formData"] + }, + "array": [ + { + "label": "Key", + "name": "key", + "type": "string", + "default": "" + }, + { + "label": "Value", + "name": "value", + "type": "string", + "default": "", + "acceptVariable": true + } + ], + "optional": true, + "id": "httpAgentflow_0-input-body-array", + "display": false + }, + { + "label": "Response Type", + "name": "responseType", + "type": "options", + "options": [ + { + "label": "JSON", + "name": "json" + }, + { + "label": "Text", + "name": "text" + }, + { + "label": "Array Buffer", + "name": "arraybuffer" + }, + { + "label": "Raw (Base64)", + "name": "base64" + } + ], + "optional": true, + "id": 
"httpAgentflow_0-input-responseType-options", + "display": true + } + ], + "inputAnchors": [], + "inputs": { + "method": "GET", + "url": "", + "headers": "", + "queryParams": "", + "bodyType": "", + "body": "", + "responseType": "" + }, + "outputAnchors": [ + { + "id": "httpAgentflow_0-output-httpAgentflow", + "label": "HTTP", + "name": "httpAgentflow" + } + ], + "outputs": {}, + "selected": false + }, + "type": "agentFlow", + "width": 202, + "height": 66, + "selected": false, + "positionAbsolute": { + "x": 738.2972542041965, + "y": 46.68491774985176 + }, + "dragging": false + } + ], + "edges": [ + { + "source": "startAgentflow_0", + "sourceHandle": "startAgentflow_0-output-startAgentflow", + "target": "agentAgentflow_0", + "targetHandle": "agentAgentflow_0", + "data": { + "sourceColor": "#7EE787", + "targetColor": "#4DD0E1", + "isHumanInput": false + }, + "type": "agentFlow", + "id": "startAgentflow_0-startAgentflow_0-output-startAgentflow-agentAgentflow_0-agentAgentflow_0" + }, + { + "source": "agentAgentflow_0", + "sourceHandle": "agentAgentflow_0-output-agentAgentflow", + "target": "agentAgentflow_1", + "targetHandle": "agentAgentflow_1", + "data": { + "sourceColor": "#4DD0E1", + "targetColor": "#4DD0E1", + "isHumanInput": false + }, + "type": "agentFlow", + "id": "agentAgentflow_0-agentAgentflow_0-output-agentAgentflow-agentAgentflow_1-agentAgentflow_1" + }, + { + "source": "agentAgentflow_1", + "sourceHandle": "agentAgentflow_1-output-agentAgentflow", + "target": "httpAgentflow_0", + "targetHandle": "httpAgentflow_0", + "data": { + "sourceColor": "#4DD0E1", + "targetColor": "#FF7F7F", + "isHumanInput": false + }, + "type": "agentFlow", + "id": "agentAgentflow_1-agentAgentflow_1-output-agentAgentflow-httpAgentflow_0-httpAgentflow_0" + } + ] +} diff --git a/packages/server/marketplaces/agentflowsv2/Iterations.json b/packages/server/marketplaces/agentflowsv2/Iterations.json index b33dd1a54..810d3addf 100644 --- 
a/packages/server/marketplaces/agentflowsv2/Iterations.json +++ b/packages/server/marketplaces/agentflowsv2/Iterations.json @@ -12,7 +12,7 @@ "data": { "id": "startAgentflow_0", "label": "Start", - "version": 1, + "version": 1.1, "name": "startAgentflow", "type": "Start", "color": "#7EE787", @@ -157,6 +157,15 @@ ], "id": "startAgentflow_0-input-startState-array", "display": true + }, + { + "label": "Persist State", + "name": "startPersistState", + "type": "boolean", + "description": "Persist the state in the same session", + "optional": true, + "id": "startAgentflow_0-input-startPersistState-boolean", + "display": true } ], "inputAnchors": [], @@ -1208,7 +1217,7 @@ "baseOptions": "", "allowImageUploads": "", "imageResolution": "low", - "reasoningEffort": "medium", + "reasoningEffort": "", "agentModel": "chatOpenAI" } }, diff --git a/packages/server/marketplaces/agentflowsv2/SQL Agent.json b/packages/server/marketplaces/agentflowsv2/SQL Agent.json new file mode 100644 index 000000000..70d1bba5c --- /dev/null +++ b/packages/server/marketplaces/agentflowsv2/SQL Agent.json @@ -0,0 +1,2052 @@ +{ + "description": "An agent that can perform question answering over a database", + "usecases": ["SQL"], + "nodes": [ + { + "id": "startAgentflow_0", + "type": "agentFlow", + "position": { + "x": -97, + "y": 108 + }, + "data": { + "id": "startAgentflow_0", + "label": "Start", + "version": 1.1, + "name": "startAgentflow", + "type": "Start", + "color": "#7EE787", + "hideInput": true, + "baseClasses": ["Start"], + "category": "Agent Flows", + "description": "Starting point of the agentflow", + "inputParams": [ + { + "label": "Input Type", + "name": "startInputType", + "type": "options", + "options": [ + { + "label": "Chat Input", + "name": "chatInput", + "description": "Start the conversation with chat input" + }, + { + "label": "Form Input", + "name": "formInput", + "description": "Start the workflow with form inputs" + } + ], + "default": "chatInput", + "id": 
"startAgentflow_0-input-startInputType-options", + "display": true + }, + { + "label": "Form Title", + "name": "formTitle", + "type": "string", + "placeholder": "Please Fill Out The Form", + "show": { + "startInputType": "formInput" + }, + "id": "startAgentflow_0-input-formTitle-string", + "display": false + }, + { + "label": "Form Description", + "name": "formDescription", + "type": "string", + "placeholder": "Complete all fields below to continue", + "show": { + "startInputType": "formInput" + }, + "id": "startAgentflow_0-input-formDescription-string", + "display": false + }, + { + "label": "Form Input Types", + "name": "formInputTypes", + "description": "Specify the type of form input", + "type": "array", + "show": { + "startInputType": "formInput" + }, + "array": [ + { + "label": "Type", + "name": "type", + "type": "options", + "options": [ + { + "label": "String", + "name": "string" + }, + { + "label": "Number", + "name": "number" + }, + { + "label": "Boolean", + "name": "boolean" + }, + { + "label": "Options", + "name": "options" + } + ], + "default": "string" + }, + { + "label": "Label", + "name": "label", + "type": "string", + "placeholder": "Label for the input" + }, + { + "label": "Variable Name", + "name": "name", + "type": "string", + "placeholder": "Variable name for the input (must be camel case)", + "description": "Variable name must be camel case. For example: firstName, lastName, etc." 
+ }, + { + "label": "Add Options", + "name": "addOptions", + "type": "array", + "show": { + "formInputTypes[$index].type": "options" + }, + "array": [ + { + "label": "Option", + "name": "option", + "type": "string" + } + ] + } + ], + "id": "startAgentflow_0-input-formInputTypes-array", + "display": false + }, + { + "label": "Ephemeral Memory", + "name": "startEphemeralMemory", + "type": "boolean", + "description": "Start fresh for every execution without past chat history", + "optional": true, + "id": "startAgentflow_0-input-startEphemeralMemory-boolean", + "display": true + }, + { + "label": "Flow State", + "name": "startState", + "description": "Runtime state during the execution of the workflow", + "type": "array", + "optional": true, + "array": [ + { + "label": "Key", + "name": "key", + "type": "string", + "placeholder": "Foo" + }, + { + "label": "Value", + "name": "value", + "type": "string", + "placeholder": "Bar", + "optional": true + } + ], + "id": "startAgentflow_0-input-startState-array", + "display": true + }, + { + "label": "Persist State", + "name": "startPersistState", + "type": "boolean", + "description": "Persist the state in the same session", + "optional": true, + "id": "startAgentflow_0-input-startPersistState-boolean", + "display": true + } + ], + "inputAnchors": [], + "inputs": { + "startInputType": "chatInput", + "formTitle": "", + "formDescription": "", + "formInputTypes": "", + "startEphemeralMemory": "", + "startState": [ + { + "key": "sqlQuery", + "value": "" + } + ], + "startPersistState": "" + }, + "outputAnchors": [ + { + "id": "startAgentflow_0-output-startAgentflow", + "label": "Start", + "name": "startAgentflow" + } + ], + "outputs": {}, + "selected": false + }, + "width": 103, + "height": 66, + "selected": false, + "positionAbsolute": { + "x": -97, + "y": 108 + }, + "dragging": false + }, + { + "id": "customFunctionAgentflow_0", + "position": { + "x": 58.5, + "y": 109 + }, + "data": { + "id": "customFunctionAgentflow_0", + "label": 
"Get DB Schema", + "version": 1, + "name": "customFunctionAgentflow", + "type": "CustomFunction", + "color": "#E4B7FF", + "baseClasses": ["CustomFunction"], + "category": "Agent Flows", + "description": "Execute custom function", + "inputParams": [ + { + "label": "Input Variables", + "name": "customFunctionInputVariables", + "description": "Input variables can be used in the function with prefix $. For example: $foo", + "type": "array", + "optional": true, + "acceptVariable": true, + "array": [ + { + "label": "Variable Name", + "name": "variableName", + "type": "string" + }, + { + "label": "Variable Value", + "name": "variableValue", + "type": "string", + "acceptVariable": true + } + ], + "id": "customFunctionAgentflow_0-input-customFunctionInputVariables-array", + "display": true + }, + { + "label": "Javascript Function", + "name": "customFunctionJavascriptFunction", + "type": "code", + "codeExample": "/*\n* You can use any libraries imported in Flowise\n* You can use properties specified in Input Schema as variables. Ex: Property = userid, Variable = $userid\n* You can get default flow config: $flow.sessionId, $flow.chatId, $flow.chatflowId, $flow.input, $flow.state\n* You can get custom variables: $vars.\n* Must return a string value at the end of function\n*/\n\nconst fetch = require('node-fetch');\nconst url = 'https://api.open-meteo.com/v1/forecast?latitude=52.52&longitude=13.41&current_weather=true';\nconst options = {\n method: 'GET',\n headers: {\n 'Content-Type': 'application/json'\n }\n};\ntry {\n const response = await fetch(url, options);\n const text = await response.text();\n return text;\n} catch (error) {\n console.error(error);\n return '';\n}", + "description": "The function to execute. 
Must return a string or an object that can be converted to a string.", + "id": "customFunctionAgentflow_0-input-customFunctionJavascriptFunction-code", + "display": true + }, + { + "label": "Update Flow State", + "name": "customFunctionUpdateState", + "description": "Update runtime state during the execution of the workflow", + "type": "array", + "optional": true, + "acceptVariable": true, + "array": [ + { + "label": "Key", + "name": "key", + "type": "asyncOptions", + "loadMethod": "listRuntimeStateKeys", + "freeSolo": true + }, + { + "label": "Value", + "name": "value", + "type": "string", + "acceptVariable": true, + "acceptNodeOutputAsVariable": true + } + ], + "id": "customFunctionAgentflow_0-input-customFunctionUpdateState-array", + "display": true + } + ], + "inputAnchors": [], + "inputs": { + "customFunctionInputVariables": "", + "customFunctionJavascriptFunction": "const { DataSource } = require('typeorm');\nconst { Pool } = require('pg');\n\nconst HOST = 'localhost';\nconst USER = 'testuser';\nconst PASSWORD = 'testpwd';\nconst DATABASE = 'abudhabi';\nconst PORT = 5555;\n\nlet sqlSchemaPrompt = '';\n\nconst AppDataSource = new DataSource({\n type: 'postgres',\n host: HOST,\n port: PORT,\n username: USER,\n password: PASSWORD,\n database: DATABASE,\n synchronize: false,\n logging: false,\n});\n\nasync function getSQLPrompt() {\n try {\n await AppDataSource.initialize();\n const queryRunner = AppDataSource.createQueryRunner();\n\n // Get all user-defined tables (excluding system tables)\n const tablesResult = await queryRunner.query(`\n SELECT table_name\n FROM information_schema.tables\n WHERE table_schema = 'public' AND table_type = 'BASE TABLE'\n `);\n\n for (const tableRow of tablesResult) {\n const tableName = tableRow.table_name;\n\n const schemaInfo = await queryRunner.query(`\n SELECT column_name, data_type, is_nullable\n FROM information_schema.columns\n WHERE table_name = '${tableName}'\n `);\n\n const createColumns = [];\n const columnNames = 
[];\n\n for (const column of schemaInfo) {\n const name = column.column_name;\n const type = column.data_type.toUpperCase();\n const notNull = column.is_nullable === 'NO' ? 'NOT NULL' : '';\n columnNames.push(name);\n createColumns.push(`${name} ${type} ${notNull}`);\n }\n\n const sqlCreateTableQuery = `CREATE TABLE ${tableName} (${createColumns.join(', ')})`;\n const sqlSelectTableQuery = `SELECT * FROM ${tableName} LIMIT 3`;\n\n let allValues = [];\n try {\n const rows = await queryRunner.query(sqlSelectTableQuery);\n\n allValues = rows.map(row =>\n columnNames.map(col => row[col]).join(' ')\n );\n } catch (err) {\n allValues.push('[ERROR FETCHING ROWS]');\n }\n\n sqlSchemaPrompt +=\n sqlCreateTableQuery +\n '\\n' +\n sqlSelectTableQuery +\n '\\n' +\n columnNames.join(' ') +\n '\\n' +\n allValues.join('\\n') +\n '\\n\\n';\n }\n\n await queryRunner.release();\n } catch (err) {\n console.error(err);\n throw err;\n }\n}\n\nasync function main() {\n await getSQLPrompt();\n}\n\nawait main();\n\nreturn sqlSchemaPrompt;\n", + "customFunctionUpdateState": "" + }, + "outputAnchors": [ + { + "id": "customFunctionAgentflow_0-output-customFunctionAgentflow", + "label": "Custom Function", + "name": "customFunctionAgentflow" + } + ], + "outputs": {}, + "selected": false + }, + "type": "agentFlow", + "width": 173, + "height": 66, + "selected": false, + "positionAbsolute": { + "x": 58.5, + "y": 109 + }, + "dragging": false + }, + { + "id": "llmAgentflow_0", + "position": { + "x": 272.7184381707814, + "y": 106.61165168988839 + }, + "data": { + "id": "llmAgentflow_0", + "label": "Generate SQL Query", + "version": 1, + "name": "llmAgentflow", + "type": "LLM", + "color": "#64B5F6", + "baseClasses": ["LLM"], + "category": "Agent Flows", + "description": "Large language models to analyze user-provided inputs and generate responses", + "inputParams": [ + { + "label": "Model", + "name": "llmModel", + "type": "asyncOptions", + "loadMethod": "listModels", + "loadConfig": true, + "id": 
"llmAgentflow_0-input-llmModel-asyncOptions", + "display": true + }, + { + "label": "Messages", + "name": "llmMessages", + "type": "array", + "optional": true, + "acceptVariable": true, + "array": [ + { + "label": "Role", + "name": "role", + "type": "options", + "options": [ + { + "label": "System", + "name": "system" + }, + { + "label": "Assistant", + "name": "assistant" + }, + { + "label": "Developer", + "name": "developer" + }, + { + "label": "User", + "name": "user" + } + ] + }, + { + "label": "Content", + "name": "content", + "type": "string", + "acceptVariable": true, + "generateInstruction": true, + "rows": 4 + } + ], + "id": "llmAgentflow_0-input-llmMessages-array", + "display": true + }, + { + "label": "Enable Memory", + "name": "llmEnableMemory", + "type": "boolean", + "description": "Enable memory for the conversation thread", + "default": true, + "optional": true, + "id": "llmAgentflow_0-input-llmEnableMemory-boolean", + "display": true + }, + { + "label": "Memory Type", + "name": "llmMemoryType", + "type": "options", + "options": [ + { + "label": "All Messages", + "name": "allMessages", + "description": "Retrieve all messages from the conversation" + }, + { + "label": "Window Size", + "name": "windowSize", + "description": "Uses a fixed window size to surface the last N messages" + }, + { + "label": "Conversation Summary", + "name": "conversationSummary", + "description": "Summarizes the whole conversation" + }, + { + "label": "Conversation Summary Buffer", + "name": "conversationSummaryBuffer", + "description": "Summarize conversations once token limit is reached. 
Default to 2000" + } + ], + "optional": true, + "default": "allMessages", + "show": { + "llmEnableMemory": true + }, + "id": "llmAgentflow_0-input-llmMemoryType-options", + "display": true + }, + { + "label": "Window Size", + "name": "llmMemoryWindowSize", + "type": "number", + "default": "20", + "description": "Uses a fixed window size to surface the last N messages", + "show": { + "llmMemoryType": "windowSize" + }, + "id": "llmAgentflow_0-input-llmMemoryWindowSize-number", + "display": false + }, + { + "label": "Max Token Limit", + "name": "llmMemoryMaxTokenLimit", + "type": "number", + "default": "2000", + "description": "Summarize conversations once token limit is reached. Default to 2000", + "show": { + "llmMemoryType": "conversationSummaryBuffer" + }, + "id": "llmAgentflow_0-input-llmMemoryMaxTokenLimit-number", + "display": false + }, + { + "label": "Input Message", + "name": "llmUserMessage", + "type": "string", + "description": "Add an input message as user message at the end of the conversation", + "rows": 4, + "optional": true, + "acceptVariable": true, + "show": { + "llmEnableMemory": true + }, + "id": "llmAgentflow_0-input-llmUserMessage-string", + "display": true + }, + { + "label": "Return Response As", + "name": "llmReturnResponseAs", + "type": "options", + "options": [ + { + "label": "User Message", + "name": "userMessage" + }, + { + "label": "Assistant Message", + "name": "assistantMessage" + } + ], + "default": "userMessage", + "id": "llmAgentflow_0-input-llmReturnResponseAs-options", + "display": true + }, + { + "label": "JSON Structured Output", + "name": "llmStructuredOutput", + "description": "Instruct the LLM to give output in a JSON structured schema", + "type": "array", + "optional": true, + "acceptVariable": true, + "array": [ + { + "label": "Key", + "name": "key", + "type": "string" + }, + { + "label": "Type", + "name": "type", + "type": "options", + "options": [ + { + "label": "String", + "name": "string" + }, + { + "label": "String 
Array", + "name": "stringArray" + }, + { + "label": "Number", + "name": "number" + }, + { + "label": "Boolean", + "name": "boolean" + }, + { + "label": "Enum", + "name": "enum" + }, + { + "label": "JSON Array", + "name": "jsonArray" + } + ] + }, + { + "label": "Enum Values", + "name": "enumValues", + "type": "string", + "placeholder": "value1, value2, value3", + "description": "Enum values. Separated by comma", + "optional": true, + "show": { + "llmStructuredOutput[$index].type": "enum" + } + }, + { + "label": "JSON Schema", + "name": "jsonSchema", + "type": "code", + "placeholder": "{\n \"answer\": {\n \"type\": \"string\",\n \"description\": \"Value of the answer\"\n },\n \"reason\": {\n \"type\": \"string\",\n \"description\": \"Reason for the answer\"\n },\n \"optional\": {\n \"type\": \"boolean\"\n },\n \"count\": {\n \"type\": \"number\"\n },\n \"children\": {\n \"type\": \"array\",\n \"items\": {\n \"type\": \"object\",\n \"properties\": {\n \"value\": {\n \"type\": \"string\",\n \"description\": \"Value of the children's answer\"\n }\n }\n }\n }\n}", + "description": "JSON schema for the structured output", + "optional": true, + "show": { + "llmStructuredOutput[$index].type": "jsonArray" + } + }, + { + "label": "Description", + "name": "description", + "type": "string", + "placeholder": "Description of the key" + } + ], + "id": "llmAgentflow_0-input-llmStructuredOutput-array", + "display": true + }, + { + "label": "Update Flow State", + "name": "llmUpdateState", + "description": "Update runtime state during the execution of the workflow", + "type": "array", + "optional": true, + "acceptVariable": true, + "array": [ + { + "label": "Key", + "name": "key", + "type": "asyncOptions", + "loadMethod": "listRuntimeStateKeys", + "freeSolo": true + }, + { + "label": "Value", + "name": "value", + "type": "string", + "acceptVariable": true, + "acceptNodeOutputAsVariable": true + } + ], + "id": "llmAgentflow_0-input-llmUpdateState-array", + "display": true + } + ], + 
"inputAnchors": [], + "inputs": { + "llmModel": "chatAnthropic", + "llmMessages": [ + { + "role": "system", + "content": "

You are an agent designed to interact with a SQL database. Given an input question, create a syntactically correct PostgreSQL query to run, then look at the results of the query and return the answer. Unless the user specifies a specific number of examples they wish to obtain, always limit your query to at most 5 results. You can order the results by a relevant column to return the most interesting examples in the database. Never query for all the columns from a specific table, only ask for the relevant columns given the question. DO NOT make any DML statements (INSERT, UPDATE, DELETE, DROP etc.) to the database.

Here is the relevant table info:

{{ customFunctionAgentflow_0 }}

Note:

  • Only generate ONE SQL query

" + } + ], + "llmEnableMemory": true, + "llmMemoryType": "allMessages", + "llmUserMessage": "", + "llmReturnResponseAs": "userMessage", + "llmStructuredOutput": [ + { + "key": "sql_query", + "type": "string", + "enumValues": "", + "jsonSchema": "", + "description": "SQL query" + } + ], + "llmUpdateState": [ + { + "key": "sqlQuery", + "value": "

{{ output.sql_query }}

" + } + ], + "llmModelConfig": { + "credential": "", + "modelName": "claude-sonnet-4-0", + "temperature": 0.9, + "streaming": true, + "maxTokensToSample": "", + "topP": "", + "topK": "", + "extendedThinking": "", + "budgetTokens": 1024, + "allowImageUploads": "", + "llmModel": "chatAnthropic" + } + }, + "outputAnchors": [ + { + "id": "llmAgentflow_0-output-llmAgentflow", + "label": "LLM", + "name": "llmAgentflow" + } + ], + "outputs": {}, + "selected": false + }, + "type": "agentFlow", + "width": 213, + "height": 72, + "selected": false, + "positionAbsolute": { + "x": 272.7184381707814, + "y": 106.61165168988839 + }, + "dragging": false + }, + { + "id": "conditionAgentAgentflow_0", + "position": { + "x": 511.16504493033483, + "y": 101.98220225318451 + }, + "data": { + "id": "conditionAgentAgentflow_0", + "label": "Check SQL Query", + "version": 1, + "name": "conditionAgentAgentflow", + "type": "ConditionAgent", + "color": "#ff8fab", + "baseClasses": ["ConditionAgent"], + "category": "Agent Flows", + "description": "Utilize an agent to split flows based on dynamic conditions", + "inputParams": [ + { + "label": "Model", + "name": "conditionAgentModel", + "type": "asyncOptions", + "loadMethod": "listModels", + "loadConfig": true, + "id": "conditionAgentAgentflow_0-input-conditionAgentModel-asyncOptions", + "display": true + }, + { + "label": "Instructions", + "name": "conditionAgentInstructions", + "type": "string", + "description": "A general instructions of what the condition agent should do", + "rows": 4, + "acceptVariable": true, + "placeholder": "Determine if the user is interested in learning about AI", + "id": "conditionAgentAgentflow_0-input-conditionAgentInstructions-string", + "display": true + }, + { + "label": "Input", + "name": "conditionAgentInput", + "type": "string", + "description": "Input to be used for the condition agent", + "rows": 4, + "acceptVariable": true, + "default": "

{{ question }}

", + "id": "conditionAgentAgentflow_0-input-conditionAgentInput-string", + "display": true + }, + { + "label": "Scenarios", + "name": "conditionAgentScenarios", + "description": "Define the scenarios that will be used as the conditions to split the flow", + "type": "array", + "array": [ + { + "label": "Scenario", + "name": "scenario", + "type": "string", + "placeholder": "User is asking for a pizza" + } + ], + "default": [ + { + "scenario": "SQL query is correct and does not contains mistakes" + }, + { + "scenario": "SQL query contains mistakes" + } + ], + "id": "conditionAgentAgentflow_0-input-conditionAgentScenarios-array", + "display": true + } + ], + "inputAnchors": [], + "inputs": { + "conditionAgentModel": "chatOpenAI", + "conditionAgentInstructions": "

You are a SQL expert with strong attention to detail. Double-check the SQL query for common mistakes, including:

- Using NOT IN with NULL values

- Using UNION when UNION ALL should have been used

- Using BETWEEN for exclusive ranges

- Data type mismatch in predicates

- Properly quoting identifiers

- Using the correct number of arguments for functions

- Casting to the correct data type

- Using the proper columns for joins

", + "conditionAgentInput": "

{{ $flow.state.sqlQuery }}

", + "conditionAgentScenarios": [ + { + "scenario": "SQL query is correct and does not contains mistakes" + }, + { + "scenario": "SQL query contains mistakes" + } + ], + "conditionAgentModelConfig": { + "credential": "", + "modelName": "gpt-4o-mini", + "temperature": 0.9, + "streaming": true, + "maxTokens": "", + "topP": "", + "frequencyPenalty": "", + "presencePenalty": "", + "timeout": "", + "strictToolCalling": "", + "stopSequence": "", + "basepath": "", + "proxyUrl": "", + "baseOptions": "", + "allowImageUploads": "", + "imageResolution": "low", + "reasoningEffort": "", + "conditionAgentModel": "chatOpenAI" + } + }, + "outputAnchors": [ + { + "id": "conditionAgentAgentflow_0-output-0", + "label": "Condition Agent", + "name": "conditionAgentAgentflow" + }, + { + "id": "conditionAgentAgentflow_0-output-1", + "label": "Condition Agent", + "name": "conditionAgentAgentflow" + } + ], + "outputs": { + "conditionAgentAgentflow": "" + }, + "selected": false + }, + "type": "agentFlow", + "width": 187, + "height": 80, + "selected": false, + "positionAbsolute": { + "x": 511.16504493033483, + "y": 101.98220225318451 + }, + "dragging": false + }, + { + "id": "loopAgentflow_0", + "position": { + "x": 762.44734302386, + "y": 182.95996068910745 + }, + "data": { + "id": "loopAgentflow_0", + "label": "Regenerate Query", + "version": 1, + "name": "loopAgentflow", + "type": "Loop", + "color": "#FFA07A", + "hideOutput": true, + "baseClasses": ["Loop"], + "category": "Agent Flows", + "description": "Loop back to a previous node", + "inputParams": [ + { + "label": "Loop Back To", + "name": "loopBackToNode", + "type": "asyncOptions", + "loadMethod": "listPreviousNodes", + "freeSolo": true, + "id": "loopAgentflow_0-input-loopBackToNode-asyncOptions", + "display": true + }, + { + "label": "Max Loop Count", + "name": "maxLoopCount", + "type": "number", + "default": 5, + "id": "loopAgentflow_0-input-maxLoopCount-number", + "display": true + } + ], + "inputAnchors": [], + "inputs": { + 
"loopBackToNode": "llmAgentflow_0-Generate SQL Query", + "maxLoopCount": 5 + }, + "outputAnchors": [], + "outputs": {}, + "selected": false + }, + "type": "agentFlow", + "width": 190, + "height": 66, + "selected": false, + "positionAbsolute": { + "x": 762.44734302386, + "y": 182.95996068910745 + }, + "dragging": false + }, + { + "id": "customFunctionAgentflow_1", + "position": { + "x": 761.3261621815544, + "y": 44.65096212173265 + }, + "data": { + "id": "customFunctionAgentflow_1", + "label": "Run SQL Query", + "version": 1, + "name": "customFunctionAgentflow", + "type": "CustomFunction", + "color": "#E4B7FF", + "baseClasses": ["CustomFunction"], + "category": "Agent Flows", + "description": "Execute custom function", + "inputParams": [ + { + "label": "Input Variables", + "name": "customFunctionInputVariables", + "description": "Input variables can be used in the function with prefix $. For example: $foo", + "type": "array", + "optional": true, + "acceptVariable": true, + "array": [ + { + "label": "Variable Name", + "name": "variableName", + "type": "string" + }, + { + "label": "Variable Value", + "name": "variableValue", + "type": "string", + "acceptVariable": true + } + ], + "id": "customFunctionAgentflow_1-input-customFunctionInputVariables-array", + "display": true + }, + { + "label": "Javascript Function", + "name": "customFunctionJavascriptFunction", + "type": "code", + "codeExample": "/*\n* You can use any libraries imported in Flowise\n* You can use properties specified in Input Schema as variables. 
Ex: Property = userid, Variable = $userid\n* You can get default flow config: $flow.sessionId, $flow.chatId, $flow.chatflowId, $flow.input, $flow.state\n* You can get custom variables: $vars.\n* Must return a string value at the end of function\n*/\n\nconst fetch = require('node-fetch');\nconst url = 'https://api.open-meteo.com/v1/forecast?latitude=52.52&longitude=13.41¤t_weather=true';\nconst options = {\n method: 'GET',\n headers: {\n 'Content-Type': 'application/json'\n }\n};\ntry {\n const response = await fetch(url, options);\n const text = await response.text();\n return text;\n} catch (error) {\n console.error(error);\n return '';\n}", + "description": "The function to execute. Must return a string or an object that can be converted to a string.", + "id": "customFunctionAgentflow_1-input-customFunctionJavascriptFunction-code", + "display": true + }, + { + "label": "Update Flow State", + "name": "customFunctionUpdateState", + "description": "Update runtime state during the execution of the workflow", + "type": "array", + "optional": true, + "acceptVariable": true, + "array": [ + { + "label": "Key", + "name": "key", + "type": "asyncOptions", + "loadMethod": "listRuntimeStateKeys", + "freeSolo": true + }, + { + "label": "Value", + "name": "value", + "type": "string", + "acceptVariable": true, + "acceptNodeOutputAsVariable": true + } + ], + "id": "customFunctionAgentflow_1-input-customFunctionUpdateState-array", + "display": true + } + ], + "inputAnchors": [], + "inputs": { + "customFunctionInputVariables": [ + { + "variableName": "sqlQuery", + "variableValue": "

{{ $flow.state.sqlQuery }}

" + } + ], + "customFunctionJavascriptFunction": "const { DataSource } = require('typeorm');\nconst { Pool } = require('pg');\n\n// Configuration\nconst HOST = 'localhost';\nconst USER = 'testuser';\nconst PASSWORD = 'testpwd';\nconst DATABASE = 'abudhabi';\nconst PORT = 5555;\n\nconst sqlQuery = $sqlQuery;\n\nconst AppDataSource = new DataSource({\n type: 'postgres',\n host: HOST,\n port: PORT,\n username: USER,\n password: PASSWORD,\n database: DATABASE,\n synchronize: false,\n logging: false,\n});\n\nlet formattedResult = '';\n\nasync function runSQLQuery(query) {\n try {\n await AppDataSource.initialize();\n const queryRunner = AppDataSource.createQueryRunner();\n\n const rows = await queryRunner.query(query);\n console.log('rows =', rows);\n\n if (rows.length === 0) {\n formattedResult = '[No results returned]';\n } else {\n const columnNames = Object.keys(rows[0]);\n const header = columnNames.join(' ');\n const values = rows.map(row =>\n columnNames.map(col => row[col]).join(' ')\n );\n\n formattedResult = query + '\\n' + header + '\\n' + values.join('\\n');\n }\n\n await queryRunner.release();\n } catch (err) {\n console.error('[ERROR]', err);\n formattedResult = `[Error executing query]: ${err}`;\n }\n\n return formattedResult;\n}\n\nasync function main() {\n formattedResult = await runSQLQuery(sqlQuery);\n}\n\nawait main();\n\nreturn formattedResult;\n", + "customFunctionUpdateState": "" + }, + "outputAnchors": [ + { + "id": "customFunctionAgentflow_1-output-customFunctionAgentflow", + "label": "Custom Function", + "name": "customFunctionAgentflow" + } + ], + "outputs": {}, + "selected": false + }, + "type": "agentFlow", + "width": 171, + "height": 66, + "selected": false, + "positionAbsolute": { + "x": 761.3261621815544, + "y": 44.65096212173265 + }, + "dragging": false + }, + { + "id": "llmAgentflow_1", + "position": { + "x": 1238.7660285501179, + "y": 20.56658816269558 + }, + "data": { + "id": "llmAgentflow_1", + "label": "Return Response", + 
"version": 1, + "name": "llmAgentflow", + "type": "LLM", + "color": "#64B5F6", + "baseClasses": ["LLM"], + "category": "Agent Flows", + "description": "Large language models to analyze user-provided inputs and generate responses", + "inputParams": [ + { + "label": "Model", + "name": "llmModel", + "type": "asyncOptions", + "loadMethod": "listModels", + "loadConfig": true, + "id": "llmAgentflow_1-input-llmModel-asyncOptions", + "display": true + }, + { + "label": "Messages", + "name": "llmMessages", + "type": "array", + "optional": true, + "acceptVariable": true, + "array": [ + { + "label": "Role", + "name": "role", + "type": "options", + "options": [ + { + "label": "System", + "name": "system" + }, + { + "label": "Assistant", + "name": "assistant" + }, + { + "label": "Developer", + "name": "developer" + }, + { + "label": "User", + "name": "user" + } + ] + }, + { + "label": "Content", + "name": "content", + "type": "string", + "acceptVariable": true, + "generateInstruction": true, + "rows": 4 + } + ], + "id": "llmAgentflow_1-input-llmMessages-array", + "display": true + }, + { + "label": "Enable Memory", + "name": "llmEnableMemory", + "type": "boolean", + "description": "Enable memory for the conversation thread", + "default": true, + "optional": true, + "id": "llmAgentflow_1-input-llmEnableMemory-boolean", + "display": true + }, + { + "label": "Memory Type", + "name": "llmMemoryType", + "type": "options", + "options": [ + { + "label": "All Messages", + "name": "allMessages", + "description": "Retrieve all messages from the conversation" + }, + { + "label": "Window Size", + "name": "windowSize", + "description": "Uses a fixed window size to surface the last N messages" + }, + { + "label": "Conversation Summary", + "name": "conversationSummary", + "description": "Summarizes the whole conversation" + }, + { + "label": "Conversation Summary Buffer", + "name": "conversationSummaryBuffer", + "description": "Summarize conversations once token limit is reached. 
Default to 2000" + } + ], + "optional": true, + "default": "allMessages", + "show": { + "llmEnableMemory": true + }, + "id": "llmAgentflow_1-input-llmMemoryType-options", + "display": true + }, + { + "label": "Window Size", + "name": "llmMemoryWindowSize", + "type": "number", + "default": "20", + "description": "Uses a fixed window size to surface the last N messages", + "show": { + "llmMemoryType": "windowSize" + }, + "id": "llmAgentflow_1-input-llmMemoryWindowSize-number", + "display": false + }, + { + "label": "Max Token Limit", + "name": "llmMemoryMaxTokenLimit", + "type": "number", + "default": "2000", + "description": "Summarize conversations once token limit is reached. Default to 2000", + "show": { + "llmMemoryType": "conversationSummaryBuffer" + }, + "id": "llmAgentflow_1-input-llmMemoryMaxTokenLimit-number", + "display": false + }, + { + "label": "Input Message", + "name": "llmUserMessage", + "type": "string", + "description": "Add an input message as user message at the end of the conversation", + "rows": 4, + "optional": true, + "acceptVariable": true, + "show": { + "llmEnableMemory": true + }, + "id": "llmAgentflow_1-input-llmUserMessage-string", + "display": true + }, + { + "label": "Return Response As", + "name": "llmReturnResponseAs", + "type": "options", + "options": [ + { + "label": "User Message", + "name": "userMessage" + }, + { + "label": "Assistant Message", + "name": "assistantMessage" + } + ], + "default": "userMessage", + "id": "llmAgentflow_1-input-llmReturnResponseAs-options", + "display": true + }, + { + "label": "JSON Structured Output", + "name": "llmStructuredOutput", + "description": "Instruct the LLM to give output in a JSON structured schema", + "type": "array", + "optional": true, + "acceptVariable": true, + "array": [ + { + "label": "Key", + "name": "key", + "type": "string" + }, + { + "label": "Type", + "name": "type", + "type": "options", + "options": [ + { + "label": "String", + "name": "string" + }, + { + "label": "String 
Array", + "name": "stringArray" + }, + { + "label": "Number", + "name": "number" + }, + { + "label": "Boolean", + "name": "boolean" + }, + { + "label": "Enum", + "name": "enum" + }, + { + "label": "JSON Array", + "name": "jsonArray" + } + ] + }, + { + "label": "Enum Values", + "name": "enumValues", + "type": "string", + "placeholder": "value1, value2, value3", + "description": "Enum values. Separated by comma", + "optional": true, + "show": { + "llmStructuredOutput[$index].type": "enum" + } + }, + { + "label": "JSON Schema", + "name": "jsonSchema", + "type": "code", + "placeholder": "{\n \"answer\": {\n \"type\": \"string\",\n \"description\": \"Value of the answer\"\n },\n \"reason\": {\n \"type\": \"string\",\n \"description\": \"Reason for the answer\"\n },\n \"optional\": {\n \"type\": \"boolean\"\n },\n \"count\": {\n \"type\": \"number\"\n },\n \"children\": {\n \"type\": \"array\",\n \"items\": {\n \"type\": \"object\",\n \"properties\": {\n \"value\": {\n \"type\": \"string\",\n \"description\": \"Value of the children's answer\"\n }\n }\n }\n }\n}", + "description": "JSON schema for the structured output", + "optional": true, + "show": { + "llmStructuredOutput[$index].type": "jsonArray" + } + }, + { + "label": "Description", + "name": "description", + "type": "string", + "placeholder": "Description of the key" + } + ], + "id": "llmAgentflow_1-input-llmStructuredOutput-array", + "display": true + }, + { + "label": "Update Flow State", + "name": "llmUpdateState", + "description": "Update runtime state during the execution of the workflow", + "type": "array", + "optional": true, + "acceptVariable": true, + "array": [ + { + "label": "Key", + "name": "key", + "type": "asyncOptions", + "loadMethod": "listRuntimeStateKeys", + "freeSolo": true + }, + { + "label": "Value", + "name": "value", + "type": "string", + "acceptVariable": true, + "acceptNodeOutputAsVariable": true + } + ], + "id": "llmAgentflow_1-input-llmUpdateState-array", + "display": true + } + ], + 
"inputAnchors": [], + "inputs": { + "llmModel": "chatGoogleGenerativeAI", + "llmMessages": [], + "llmEnableMemory": true, + "llmMemoryType": "allMessages", + "llmUserMessage": "

{{ customFunctionAgentflow_1 }}

", + "llmReturnResponseAs": "userMessage", + "llmStructuredOutput": "", + "llmUpdateState": "", + "llmModelConfig": { + "credential": "", + "modelName": "gemini-2.0-flash", + "customModelName": "", + "temperature": 0.9, + "streaming": true, + "maxOutputTokens": "", + "topP": "", + "topK": "", + "harmCategory": "", + "harmBlockThreshold": "", + "baseUrl": "", + "allowImageUploads": "", + "llmModel": "chatGoogleGenerativeAI" + }, + "undefined": "" + }, + "outputAnchors": [ + { + "id": "llmAgentflow_1-output-llmAgentflow", + "label": "LLM", + "name": "llmAgentflow" + } + ], + "outputs": {}, + "selected": false + }, + "type": "agentFlow", + "width": 199, + "height": 72, + "selected": false, + "positionAbsolute": { + "x": 1238.7660285501179, + "y": 20.56658816269558 + }, + "dragging": false + }, + { + "id": "conditionAgentAgentflow_1", + "position": { + "x": 966.5436041632489, + "y": 57.77868724229256 + }, + "data": { + "id": "conditionAgentAgentflow_1", + "label": "Check Result", + "version": 1, + "name": "conditionAgentAgentflow", + "type": "ConditionAgent", + "color": "#ff8fab", + "baseClasses": ["ConditionAgent"], + "category": "Agent Flows", + "description": "Utilize an agent to split flows based on dynamic conditions", + "inputParams": [ + { + "label": "Model", + "name": "conditionAgentModel", + "type": "asyncOptions", + "loadMethod": "listModels", + "loadConfig": true, + "id": "conditionAgentAgentflow_1-input-conditionAgentModel-asyncOptions", + "display": true + }, + { + "label": "Instructions", + "name": "conditionAgentInstructions", + "type": "string", + "description": "A general instructions of what the condition agent should do", + "rows": 4, + "acceptVariable": true, + "placeholder": "Determine if the user is interested in learning about AI", + "id": "conditionAgentAgentflow_1-input-conditionAgentInstructions-string", + "display": true + }, + { + "label": "Input", + "name": "conditionAgentInput", + "type": "string", + "description": "Input to be used for 
the condition agent", + "rows": 4, + "acceptVariable": true, + "default": "

{{ question }}

", + "id": "conditionAgentAgentflow_1-input-conditionAgentInput-string", + "display": true + }, + { + "label": "Scenarios", + "name": "conditionAgentScenarios", + "description": "Define the scenarios that will be used as the conditions to split the flow", + "type": "array", + "array": [ + { + "label": "Scenario", + "name": "scenario", + "type": "string", + "placeholder": "User is asking for a pizza" + } + ], + "default": [ + { + "scenario": "Result is correct and does not contains error" + }, + { + "scenario": "Result query contains error" + } + ], + "id": "conditionAgentAgentflow_1-input-conditionAgentScenarios-array", + "display": true + } + ], + "inputAnchors": [], + "inputs": { + "conditionAgentModel": "chatMistralAI", + "conditionAgentInstructions": "

You are a SQL expert. Check if the query result is correct or contains an error.

", + "conditionAgentInput": "

{{ customFunctionAgentflow_1 }}

", + "conditionAgentScenarios": [ + { + "scenario": "Result is correct and does not contains error" + }, + { + "scenario": "Result query contains error" + } + ], + "conditionAgentModelConfig": { + "credential": "", + "modelName": "mistral-medium-latest", + "temperature": 0.9, + "streaming": true, + "maxOutputTokens": "", + "topP": "", + "randomSeed": "", + "safeMode": "", + "overrideEndpoint": "", + "conditionAgentModel": "chatMistralAI" + } + }, + "outputAnchors": [ + { + "id": "conditionAgentAgentflow_1-output-0", + "label": "Condition Agent", + "name": "conditionAgentAgentflow" + }, + { + "id": "conditionAgentAgentflow_1-output-1", + "label": "Condition Agent", + "name": "conditionAgentAgentflow" + } + ], + "outputs": { + "conditionAgentAgentflow": "" + }, + "selected": false + }, + "type": "agentFlow", + "width": 228, + "height": 80, + "selected": false, + "positionAbsolute": { + "x": 966.5436041632489, + "y": 57.77868724229256 + }, + "dragging": false + }, + { + "id": "loopAgentflow_1", + "position": { + "x": 1501.0055934843515, + "y": 140.83809747682727 + }, + "data": { + "id": "loopAgentflow_1", + "label": "Recheck SQL Query", + "version": 1, + "name": "loopAgentflow", + "type": "Loop", + "color": "#FFA07A", + "hideOutput": true, + "baseClasses": ["Loop"], + "category": "Agent Flows", + "description": "Loop back to a previous node", + "inputParams": [ + { + "label": "Loop Back To", + "name": "loopBackToNode", + "type": "asyncOptions", + "loadMethod": "listPreviousNodes", + "freeSolo": true, + "id": "loopAgentflow_1-input-loopBackToNode-asyncOptions", + "display": true + }, + { + "label": "Max Loop Count", + "name": "maxLoopCount", + "type": "number", + "default": 5, + "id": "loopAgentflow_1-input-maxLoopCount-number", + "display": true + } + ], + "inputAnchors": [], + "inputs": { + "loopBackToNode": "conditionAgentAgentflow_0-Check SQL Query", + "maxLoopCount": 5, + "undefined": "" + }, + "outputAnchors": [], + "outputs": {}, + "selected": false + }, + 
"type": "agentFlow", + "width": 202, + "height": 66, + "selected": false, + "positionAbsolute": { + "x": 1501.0055934843515, + "y": 140.83809747682727 + }, + "dragging": false + }, + { + "id": "llmAgentflow_2", + "position": { + "x": 1235.4868883628933, + "y": 137.82100195002667 + }, + "data": { + "id": "llmAgentflow_2", + "label": "Regenerate SQL Query", + "version": 1, + "name": "llmAgentflow", + "type": "LLM", + "color": "#64B5F6", + "baseClasses": ["LLM"], + "category": "Agent Flows", + "description": "Large language models to analyze user-provided inputs and generate responses", + "inputParams": [ + { + "label": "Model", + "name": "llmModel", + "type": "asyncOptions", + "loadMethod": "listModels", + "loadConfig": true, + "id": "llmAgentflow_2-input-llmModel-asyncOptions", + "display": true + }, + { + "label": "Messages", + "name": "llmMessages", + "type": "array", + "optional": true, + "acceptVariable": true, + "array": [ + { + "label": "Role", + "name": "role", + "type": "options", + "options": [ + { + "label": "System", + "name": "system" + }, + { + "label": "Assistant", + "name": "assistant" + }, + { + "label": "Developer", + "name": "developer" + }, + { + "label": "User", + "name": "user" + } + ] + }, + { + "label": "Content", + "name": "content", + "type": "string", + "acceptVariable": true, + "generateInstruction": true, + "rows": 4 + } + ], + "id": "llmAgentflow_2-input-llmMessages-array", + "display": true + }, + { + "label": "Enable Memory", + "name": "llmEnableMemory", + "type": "boolean", + "description": "Enable memory for the conversation thread", + "default": true, + "optional": true, + "id": "llmAgentflow_2-input-llmEnableMemory-boolean", + "display": true + }, + { + "label": "Memory Type", + "name": "llmMemoryType", + "type": "options", + "options": [ + { + "label": "All Messages", + "name": "allMessages", + "description": "Retrieve all messages from the conversation" + }, + { + "label": "Window Size", + "name": "windowSize", + "description": 
"Uses a fixed window size to surface the last N messages" + }, + { + "label": "Conversation Summary", + "name": "conversationSummary", + "description": "Summarizes the whole conversation" + }, + { + "label": "Conversation Summary Buffer", + "name": "conversationSummaryBuffer", + "description": "Summarize conversations once token limit is reached. Default to 2000" + } + ], + "optional": true, + "default": "allMessages", + "show": { + "llmEnableMemory": true + }, + "id": "llmAgentflow_2-input-llmMemoryType-options", + "display": true + }, + { + "label": "Window Size", + "name": "llmMemoryWindowSize", + "type": "number", + "default": "20", + "description": "Uses a fixed window size to surface the last N messages", + "show": { + "llmMemoryType": "windowSize" + }, + "id": "llmAgentflow_2-input-llmMemoryWindowSize-number", + "display": false + }, + { + "label": "Max Token Limit", + "name": "llmMemoryMaxTokenLimit", + "type": "number", + "default": "2000", + "description": "Summarize conversations once token limit is reached. 
Default to 2000", + "show": { + "llmMemoryType": "conversationSummaryBuffer" + }, + "id": "llmAgentflow_2-input-llmMemoryMaxTokenLimit-number", + "display": false + }, + { + "label": "Input Message", + "name": "llmUserMessage", + "type": "string", + "description": "Add an input message as user message at the end of the conversation", + "rows": 4, + "optional": true, + "acceptVariable": true, + "show": { + "llmEnableMemory": true + }, + "id": "llmAgentflow_2-input-llmUserMessage-string", + "display": true + }, + { + "label": "Return Response As", + "name": "llmReturnResponseAs", + "type": "options", + "options": [ + { + "label": "User Message", + "name": "userMessage" + }, + { + "label": "Assistant Message", + "name": "assistantMessage" + } + ], + "default": "userMessage", + "id": "llmAgentflow_2-input-llmReturnResponseAs-options", + "display": true + }, + { + "label": "JSON Structured Output", + "name": "llmStructuredOutput", + "description": "Instruct the LLM to give output in a JSON structured schema", + "type": "array", + "optional": true, + "acceptVariable": true, + "array": [ + { + "label": "Key", + "name": "key", + "type": "string" + }, + { + "label": "Type", + "name": "type", + "type": "options", + "options": [ + { + "label": "String", + "name": "string" + }, + { + "label": "String Array", + "name": "stringArray" + }, + { + "label": "Number", + "name": "number" + }, + { + "label": "Boolean", + "name": "boolean" + }, + { + "label": "Enum", + "name": "enum" + }, + { + "label": "JSON Array", + "name": "jsonArray" + } + ] + }, + { + "label": "Enum Values", + "name": "enumValues", + "type": "string", + "placeholder": "value1, value2, value3", + "description": "Enum values. 
Separated by comma", + "optional": true, + "show": { + "llmStructuredOutput[$index].type": "enum" + } + }, + { + "label": "JSON Schema", + "name": "jsonSchema", + "type": "code", + "placeholder": "{\n \"answer\": {\n \"type\": \"string\",\n \"description\": \"Value of the answer\"\n },\n \"reason\": {\n \"type\": \"string\",\n \"description\": \"Reason for the answer\"\n },\n \"optional\": {\n \"type\": \"boolean\"\n },\n \"count\": {\n \"type\": \"number\"\n },\n \"children\": {\n \"type\": \"array\",\n \"items\": {\n \"type\": \"object\",\n \"properties\": {\n \"value\": {\n \"type\": \"string\",\n \"description\": \"Value of the children's answer\"\n }\n }\n }\n }\n}", + "description": "JSON schema for the structured output", + "optional": true, + "show": { + "llmStructuredOutput[$index].type": "jsonArray" + } + }, + { + "label": "Description", + "name": "description", + "type": "string", + "placeholder": "Description of the key" + } + ], + "id": "llmAgentflow_2-input-llmStructuredOutput-array", + "display": true + }, + { + "label": "Update Flow State", + "name": "llmUpdateState", + "description": "Update runtime state during the execution of the workflow", + "type": "array", + "optional": true, + "acceptVariable": true, + "array": [ + { + "label": "Key", + "name": "key", + "type": "asyncOptions", + "loadMethod": "listRuntimeStateKeys", + "freeSolo": true + }, + { + "label": "Value", + "name": "value", + "type": "string", + "acceptVariable": true, + "acceptNodeOutputAsVariable": true + } + ], + "id": "llmAgentflow_2-input-llmUpdateState-array", + "display": true + } + ], + "inputAnchors": [], + "inputs": { + "llmModel": "chatAnthropic", + "llmMessages": [ + { + "role": "system", + "content": "

You are an agent designed to interact with a SQL database. Given an input question, create a syntactically correct sqlite query to run, then look at the results of the query and return the answer. Unless the user specifies a specific number of examples they wish to obtain, always limit your query to at most 5 results. You can order the results by a relevant column to return the most interesting examples in the database. Never query for all the columns from a specific table, only ask for the relevant columns given the question. DO NOT make any DML statements (INSERT, UPDATE, DELETE, DROP etc.) to the database.

Here is the relevant table info:

{{ customFunctionAgentflow_0 }}

" + } + ], + "llmEnableMemory": true, + "llmMemoryType": "allMessages", + "llmUserMessage": "

Given the generated SQL Query: {{ $flow.state.sqlQuery }}

I have the following error: {{ customFunctionAgentflow_1 }}

Regenerate a new SQL Query that will fix the error

", + "llmReturnResponseAs": "userMessage", + "llmStructuredOutput": [ + { + "key": "sql_query", + "type": "string", + "enumValues": "", + "jsonSchema": "", + "description": "SQL query" + } + ], + "llmUpdateState": [ + { + "key": "sqlQuery", + "value": "

{{ output.sql_query }}

" + } + ], + "llmModelConfig": { + "credential": "", + "modelName": "claude-sonnet-4-0", + "temperature": 0.9, + "streaming": true, + "maxTokensToSample": "", + "topP": "", + "topK": "", + "extendedThinking": "", + "budgetTokens": 1024, + "allowImageUploads": "", + "llmModel": "chatAnthropic" + } + }, + "outputAnchors": [ + { + "id": "llmAgentflow_2-output-llmAgentflow", + "label": "LLM", + "name": "llmAgentflow" + } + ], + "outputs": {}, + "selected": false + }, + "type": "agentFlow", + "width": 220, + "height": 72, + "selected": false, + "positionAbsolute": { + "x": 1235.4868883628933, + "y": 137.82100195002667 + }, + "dragging": false + }, + { + "id": "stickyNoteAgentflow_0", + "position": { + "x": 973.4435331695138, + "y": 156.551869199512 + }, + "data": { + "id": "stickyNoteAgentflow_0", + "label": "Sticky Note", + "version": 1, + "name": "stickyNoteAgentflow", + "type": "StickyNote", + "color": "#fee440", + "baseClasses": ["StickyNote"], + "category": "Agent Flows", + "description": "Add notes to the agent flow", + "inputParams": [ + { + "label": "", + "name": "note", + "type": "string", + "rows": 1, + "placeholder": "Type something here", + "optional": true, + "id": "stickyNoteAgentflow_0-input-note-string", + "display": true + } + ], + "inputAnchors": [], + "inputs": { + "note": "This is an auto correct mechanism that regenerate sql query if result contains error" + }, + "outputAnchors": [ + { + "id": "stickyNoteAgentflow_0-output-stickyNoteAgentflow", + "label": "Sticky Note", + "name": "stickyNoteAgentflow" + } + ], + "outputs": {}, + "selected": false + }, + "type": "stickyNote", + "width": 210, + "height": 123, + "selected": false, + "positionAbsolute": { + "x": 973.4435331695138, + "y": 156.551869199512 + }, + "dragging": false + }, + { + "id": "stickyNoteAgentflow_1", + "position": { + "x": 514.8377809033279, + "y": 200.97994630025966 + }, + "data": { + "id": "stickyNoteAgentflow_1", + "label": "Sticky Note (1)", + "version": 1, + "name": 
"stickyNoteAgentflow", + "type": "StickyNote", + "color": "#fee440", + "baseClasses": ["StickyNote"], + "category": "Agent Flows", + "description": "Add notes to the agent flow", + "inputParams": [ + { + "label": "", + "name": "note", + "type": "string", + "rows": 1, + "placeholder": "Type something here", + "optional": true, + "id": "stickyNoteAgentflow_1-input-note-string", + "display": true + } + ], + "inputAnchors": [], + "inputs": { + "note": "Check if generated SQL query contains errors/mistakes, if yes - regenerate" + }, + "outputAnchors": [ + { + "id": "stickyNoteAgentflow_1-output-stickyNoteAgentflow", + "label": "Sticky Note", + "name": "stickyNoteAgentflow" + } + ], + "outputs": {}, + "selected": false + }, + "type": "stickyNote", + "width": 210, + "height": 123, + "selected": false, + "positionAbsolute": { + "x": 514.8377809033279, + "y": 200.97994630025966 + }, + "dragging": false + }, + { + "id": "stickyNoteAgentflow_2", + "position": { + "x": 40.21835449345774, + "y": 6.978337213146034 + }, + "data": { + "id": "stickyNoteAgentflow_2", + "label": "Sticky Note (1) (2)", + "version": 1, + "name": "stickyNoteAgentflow", + "type": "StickyNote", + "color": "#fee440", + "baseClasses": ["StickyNote"], + "category": "Agent Flows", + "description": "Add notes to the agent flow", + "inputParams": [ + { + "label": "", + "name": "note", + "type": "string", + "rows": 1, + "placeholder": "Type something here", + "optional": true, + "id": "stickyNoteAgentflow_2-input-note-string", + "display": true + } + ], + "inputAnchors": [], + "inputs": { + "note": "Retrieve database schema" + }, + "outputAnchors": [ + { + "id": "stickyNoteAgentflow_2-output-stickyNoteAgentflow", + "label": "Sticky Note", + "name": "stickyNoteAgentflow" + } + ], + "outputs": {}, + "selected": false + }, + "type": "stickyNote", + "width": 210, + "height": 82, + "selected": false, + "positionAbsolute": { + "x": 40.21835449345774, + "y": 6.978337213146034 + }, + "dragging": false + } + ], + 
"edges": [ + { + "source": "startAgentflow_0", + "sourceHandle": "startAgentflow_0-output-startAgentflow", + "target": "customFunctionAgentflow_0", + "targetHandle": "customFunctionAgentflow_0", + "data": { + "sourceColor": "#7EE787", + "targetColor": "#E4B7FF", + "isHumanInput": false + }, + "type": "agentFlow", + "id": "startAgentflow_0-startAgentflow_0-output-startAgentflow-customFunctionAgentflow_0-customFunctionAgentflow_0" + }, + { + "source": "customFunctionAgentflow_0", + "sourceHandle": "customFunctionAgentflow_0-output-customFunctionAgentflow", + "target": "llmAgentflow_0", + "targetHandle": "llmAgentflow_0", + "data": { + "sourceColor": "#E4B7FF", + "targetColor": "#64B5F6", + "isHumanInput": false + }, + "type": "agentFlow", + "id": "customFunctionAgentflow_0-customFunctionAgentflow_0-output-customFunctionAgentflow-llmAgentflow_0-llmAgentflow_0" + }, + { + "source": "llmAgentflow_0", + "sourceHandle": "llmAgentflow_0-output-llmAgentflow", + "target": "conditionAgentAgentflow_0", + "targetHandle": "conditionAgentAgentflow_0", + "data": { + "sourceColor": "#64B5F6", + "targetColor": "#ff8fab", + "isHumanInput": false + }, + "type": "agentFlow", + "id": "llmAgentflow_0-llmAgentflow_0-output-llmAgentflow-conditionAgentAgentflow_0-conditionAgentAgentflow_0" + }, + { + "source": "conditionAgentAgentflow_0", + "sourceHandle": "conditionAgentAgentflow_0-output-0", + "target": "customFunctionAgentflow_1", + "targetHandle": "customFunctionAgentflow_1", + "data": { + "sourceColor": "#ff8fab", + "targetColor": "#E4B7FF", + "edgeLabel": "0", + "isHumanInput": false + }, + "type": "agentFlow", + "id": "conditionAgentAgentflow_0-conditionAgentAgentflow_0-output-0-customFunctionAgentflow_1-customFunctionAgentflow_1" + }, + { + "source": "conditionAgentAgentflow_0", + "sourceHandle": "conditionAgentAgentflow_0-output-1", + "target": "loopAgentflow_0", + "targetHandle": "loopAgentflow_0", + "data": { + "sourceColor": "#ff8fab", + "targetColor": "#FFA07A", + "edgeLabel": 
"1", + "isHumanInput": false + }, + "type": "agentFlow", + "id": "conditionAgentAgentflow_0-conditionAgentAgentflow_0-output-1-loopAgentflow_0-loopAgentflow_0" + }, + { + "source": "customFunctionAgentflow_1", + "sourceHandle": "customFunctionAgentflow_1-output-customFunctionAgentflow", + "target": "conditionAgentAgentflow_1", + "targetHandle": "conditionAgentAgentflow_1", + "data": { + "sourceColor": "#E4B7FF", + "targetColor": "#ff8fab", + "isHumanInput": false + }, + "type": "agentFlow", + "id": "customFunctionAgentflow_1-customFunctionAgentflow_1-output-customFunctionAgentflow-conditionAgentAgentflow_1-conditionAgentAgentflow_1" + }, + { + "source": "conditionAgentAgentflow_1", + "sourceHandle": "conditionAgentAgentflow_1-output-0", + "target": "llmAgentflow_1", + "targetHandle": "llmAgentflow_1", + "data": { + "sourceColor": "#ff8fab", + "targetColor": "#64B5F6", + "edgeLabel": "0", + "isHumanInput": false + }, + "type": "agentFlow", + "id": "conditionAgentAgentflow_1-conditionAgentAgentflow_1-output-0-llmAgentflow_1-llmAgentflow_1" + }, + { + "source": "conditionAgentAgentflow_1", + "sourceHandle": "conditionAgentAgentflow_1-output-1", + "target": "llmAgentflow_2", + "targetHandle": "llmAgentflow_2", + "data": { + "sourceColor": "#ff8fab", + "targetColor": "#64B5F6", + "edgeLabel": "1", + "isHumanInput": false + }, + "type": "agentFlow", + "id": "conditionAgentAgentflow_1-conditionAgentAgentflow_1-output-1-llmAgentflow_2-llmAgentflow_2" + }, + { + "source": "llmAgentflow_2", + "sourceHandle": "llmAgentflow_2-output-llmAgentflow", + "target": "loopAgentflow_1", + "targetHandle": "loopAgentflow_1", + "data": { + "sourceColor": "#64B5F6", + "targetColor": "#FFA07A", + "isHumanInput": false + }, + "type": "agentFlow", + "id": "llmAgentflow_2-llmAgentflow_2-output-llmAgentflow-loopAgentflow_1-loopAgentflow_1" + } + ] +} diff --git a/packages/server/marketplaces/agentflowsv2/Email Reply HITL Agent.json b/packages/server/marketplaces/agentflowsv2/Simple RAG.json 
similarity index 65% rename from packages/server/marketplaces/agentflowsv2/Email Reply HITL Agent.json rename to packages/server/marketplaces/agentflowsv2/Simple RAG.json index 0174b1a90..c868cc5eb 100644 --- a/packages/server/marketplaces/agentflowsv2/Email Reply HITL Agent.json +++ b/packages/server/marketplaces/agentflowsv2/Simple RAG.json @@ -1,18 +1,18 @@ { - "description": "An email reply HITL (human in the loop) agent that can proceed or refine the email with user input", - "usecases": ["Human In Loop"], + "description": "A basic RAG agent that can retrieve documents from document store and answer questions", + "usecases": ["Documents QnA"], "nodes": [ { "id": "startAgentflow_0", "type": "agentFlow", "position": { - "x": -212.0817769699585, - "y": 95.2304753249555 + "x": 64, + "y": 98.5 }, "data": { "id": "startAgentflow_0", "label": "Start", - "version": 1, + "version": 1.1, "name": "startAgentflow", "type": "Start", "color": "#7EE787", @@ -133,7 +133,9 @@ "name": "startEphemeralMemory", "type": "boolean", "description": "Start fresh for every execution without past chat history", - "optional": true + "optional": true, + "id": "startAgentflow_0-input-startEphemeralMemory-boolean", + "display": true }, { "label": "Flow State", @@ -152,11 +154,21 @@ "label": "Value", "name": "value", "type": "string", - "placeholder": "Bar" + "placeholder": "Bar", + "optional": true } ], "id": "startAgentflow_0-input-startState-array", "display": true + }, + { + "label": "Persist State", + "name": "startPersistState", + "type": "boolean", + "description": "Persist the state in the same session", + "optional": true, + "id": "startAgentflow_0-input-startPersistState-boolean", + "display": true } ], "inputAnchors": [], @@ -165,7 +177,9 @@ "formTitle": "", "formDescription": "", "formInputTypes": "", - "startState": "" + "startEphemeralMemory": "", + "startState": "", + "startPersistState": "" }, "outputAnchors": [ { @@ -177,24 +191,24 @@ "outputs": {}, "selected": false }, - 
"width": 101, - "height": 65, - "selected": false, + "width": 103, + "height": 66, "positionAbsolute": { - "x": -212.0817769699585, - "y": 95.2304753249555 + "x": 64, + "y": 98.5 }, + "selected": false, "dragging": false }, { "id": "agentAgentflow_0", "position": { - "x": -62.25, - "y": 76 + "x": 216.75, + "y": 96.5 }, "data": { "id": "agentAgentflow_0", - "label": "Email Reply Agent", + "label": "QnA", "version": 1, "name": "agentAgentflow", "type": "Agent", @@ -485,24 +499,18 @@ "agentMessages": [ { "role": "system", - "content": "

You are a customer support agent working in Flowise Inc. Write a professional email reply to user's query. Use the web search tools to get more details about the prospect.

" + "content": "

You are a helpful assistant. Using the provided context, answer the user's question to the best of your ability using the resources provided.

If there is nothing in the context relevant to the question at hand, just say \"Hmm, I'm not sure.\" Don't try to make up an answer.

" } ], - "agentTools": [ + "agentTools": "", + "agentKnowledgeDocumentStores": [ { - "agentSelectedTool": "googleCustomSearch", - "agentSelectedToolConfig": { - "agentSelectedTool": "googleCustomSearch" - } - }, - { - "agentSelectedTool": "currentDateTime", - "agentSelectedToolConfig": { - "agentSelectedTool": "currentDateTime" - } + "documentStore": "25429b8f-0377-4762-9cda-0d5366cf022c:AI-Paper", + "docStoreDescription": "This paper provides an extensive overview of artificial intelligence-generated content (AIGC), including its definition, capabilities, applications, challenges, and future directions, serving as a valuable resource for researchers and industry professionals to understand and harness AIGC's potential.", + "returnSourceDocuments": true } ], - "agentKnowledgeDocumentStores": "", + "agentKnowledgeVSEmbeddings": "", "agentEnableMemory": true, "agentMemoryType": "allMessages", "agentUserMessage": "", @@ -525,7 +533,7 @@ "baseOptions": "", "allowImageUploads": "", "imageResolution": "low", - "reasoningEffort": "medium", + "reasoningEffort": "", "agentModel": "chatOpenAI" } }, @@ -540,250 +548,64 @@ "selected": false }, "type": "agentFlow", - "width": 182, - "height": 103, + "width": 175, + "height": 72, "selected": false, "positionAbsolute": { - "x": -62.25, - "y": 76 + "x": 216.75, + "y": 96.5 }, "dragging": false }, { - "id": "humanInputAgentflow_0", + "id": "stickyNoteAgentflow_0", "position": { - "x": 156.05666363734434, - "y": 86.62266545493773 + "x": 209.875, + "y": -61.25 }, "data": { - "id": "humanInputAgentflow_0", - "label": "Human Input 0", + "id": "stickyNoteAgentflow_0", + "label": "Sticky Note", "version": 1, - "name": "humanInputAgentflow", - "type": "HumanInput", - "color": "#6E6EFD", - "baseClasses": ["HumanInput"], + "name": "stickyNoteAgentflow", + "type": "StickyNote", + "color": "#fee440", + "baseClasses": ["StickyNote"], "category": "Agent Flows", - "description": "Request human input, approval or rejection during execution", + 
"description": "Add notes to the agent flow", "inputParams": [ { - "label": "Description Type", - "name": "humanInputDescriptionType", - "type": "options", - "options": [ - { - "label": "Fixed", - "name": "fixed", - "description": "Specify a fixed description" - }, - { - "label": "Dynamic", - "name": "dynamic", - "description": "Use LLM to generate a description" - } - ], - "id": "humanInputAgentflow_0-input-humanInputDescriptionType-options", - "display": true - }, - { - "label": "Description", - "name": "humanInputDescription", + "label": "", + "name": "note", "type": "string", - "placeholder": "Are you sure you want to proceed?", - "acceptVariable": true, - "rows": 4, - "show": { - "humanInputDescriptionType": "fixed" - }, - "id": "humanInputAgentflow_0-input-humanInputDescription-string", - "display": true - }, - { - "label": "Model", - "name": "humanInputModel", - "type": "asyncOptions", - "loadMethod": "listModels", - "loadConfig": true, - "show": { - "humanInputDescriptionType": "dynamic" - }, - "id": "humanInputAgentflow_0-input-humanInputModel-asyncOptions", - "display": false - }, - { - "label": "Prompt", - "name": "humanInputModelPrompt", - "type": "string", - "default": "

Summarize the conversation between the user and the assistant, reiterate the last message from the assistant, and ask if user would like to proceed or if they have any feedback.

\n
    \n
  • Begin by capturing the key points of the conversation, ensuring that you reflect the main ideas and themes discussed.
  • \n
  • Then, clearly reproduce the last message sent by the assistant to maintain continuity. Make sure the whole message is reproduced.
  • \n
  • Finally, ask the user if they would like to proceed, or provide any feedback on the last assistant message
  • \n
\n

Output Format The output should be structured in three parts in text:

\n
    \n
  • A summary of the conversation (1-3 sentences).
  • \n
  • The last assistant message (exactly as it appeared).
  • \n
  • Ask the user if they would like to proceed, or provide any feedback on last assistant message. No other explanation and elaboration is needed.
  • \n
\n", - "acceptVariable": true, - "generateInstruction": true, - "rows": 4, - "show": { - "humanInputDescriptionType": "dynamic" - }, - "id": "humanInputAgentflow_0-input-humanInputModelPrompt-string", - "display": false - }, - { - "label": "Enable Feedback", - "name": "humanInputEnableFeedback", - "type": "boolean", - "default": true, - "id": "humanInputAgentflow_0-input-humanInputEnableFeedback-boolean", + "rows": 1, + "placeholder": "Type something here", + "optional": true, + "id": "stickyNoteAgentflow_0-input-note-string", "display": true } ], "inputAnchors": [], "inputs": { - "humanInputDescriptionType": "fixed", - "humanInputEnableFeedback": true, - "humanInputModelConfig": { - "cache": "", - "modelName": "gpt-4o-mini", - "temperature": 0.9, - "streaming": true, - "maxTokens": "", - "topP": "", - "frequencyPenalty": "", - "presencePenalty": "", - "timeout": "", - "strictToolCalling": "", - "stopSequence": "", - "basepath": "", - "proxyUrl": "", - "baseOptions": "", - "allowImageUploads": "", - "imageResolution": "low", - "reasoningEffort": "medium", - "humanInputModel": "chatOpenAI" - }, - "humanInputDescription": "

Are you sure you want to proceed?

" + "note": "Agent can retrieve documents from upserted document store, and directly from vector database" }, "outputAnchors": [ { - "id": "humanInputAgentflow_0-output-0", - "label": "Human Input", - "name": "humanInputAgentflow" - }, - { - "id": "humanInputAgentflow_0-output-1", - "label": "Human Input", - "name": "humanInputAgentflow" + "id": "stickyNoteAgentflow_0-output-stickyNoteAgentflow", + "label": "Sticky Note", + "name": "stickyNoteAgentflow" } ], - "outputs": { - "humanInputAgentflow": "" - }, - "selected": false - }, - "type": "agentFlow", - "width": 161, - "height": 80, - "selected": false, - "positionAbsolute": { - "x": 156.05666363734434, - "y": 86.62266545493773 - }, - "dragging": false - }, - { - "id": "directReplyAgentflow_0", - "position": { - "x": 363.0101864947954, - "y": 35.15053748988734 - }, - "data": { - "id": "directReplyAgentflow_0", - "label": "Direct Reply 0", - "version": 1, - "name": "directReplyAgentflow", - "type": "DirectReply", - "color": "#4DDBBB", - "hideOutput": true, - "baseClasses": ["DirectReply"], - "category": "Agent Flows", - "description": "Directly reply to the user with a message", - "inputParams": [ - { - "label": "Message", - "name": "directReplyMessage", - "type": "string", - "rows": 4, - "acceptVariable": true, - "id": "directReplyAgentflow_0-input-directReplyMessage-string", - "display": true - } - ], - "inputAnchors": [], - "inputs": { - "directReplyMessage": "

{{ agentAgentflow_0 }}

" - }, - "outputAnchors": [], "outputs": {}, "selected": false }, - "type": "agentFlow", - "width": 155, - "height": 65, + "type": "stickyNote", + "width": 210, + "height": 143, "selected": false, "positionAbsolute": { - "x": 363.0101864947954, - "y": 35.15053748988734 - }, - "dragging": false - }, - { - "id": "loopAgentflow_0", - "position": { - "x": 366.5975521223236, - "y": 130.12266545493773 - }, - "data": { - "id": "loopAgentflow_0", - "label": "Loop 0", - "version": 1, - "name": "loopAgentflow", - "type": "Loop", - "color": "#FFA07A", - "hideOutput": true, - "baseClasses": ["Loop"], - "category": "Agent Flows", - "description": "Loop back to a previous node", - "inputParams": [ - { - "label": "Loop Back To", - "name": "loopBackToNode", - "type": "asyncOptions", - "loadMethod": "listPreviousNodes", - "freeSolo": true, - "id": "loopAgentflow_0-input-loopBackToNode-asyncOptions", - "display": true - }, - { - "label": "Max Loop Count", - "name": "maxLoopCount", - "type": "number", - "default": 5, - "id": "loopAgentflow_0-input-maxLoopCount-number", - "display": true - } - ], - "inputAnchors": [], - "inputs": { - "loopBackToNode": "agentAgentflow_0-Email Reply Agent", - "maxLoopCount": 5 - }, - "outputAnchors": [], - "outputs": {}, - "selected": false - }, - "type": "agentFlow", - "width": 113, - "height": 65, - "selected": false, - "positionAbsolute": { - "x": 366.5975521223236, - "y": 130.12266545493773 + "x": 209.875, + "y": -61.25 }, "dragging": false } @@ -801,47 +623,6 @@ }, "type": "agentFlow", "id": "startAgentflow_0-startAgentflow_0-output-startAgentflow-agentAgentflow_0-agentAgentflow_0" - }, - { - "source": "agentAgentflow_0", - "sourceHandle": "agentAgentflow_0-output-agentAgentflow", - "target": "humanInputAgentflow_0", - "targetHandle": "humanInputAgentflow_0", - "data": { - "sourceColor": "#4DD0E1", - "targetColor": "#6E6EFD", - "isHumanInput": false - }, - "type": "agentFlow", - "id": 
"agentAgentflow_0-agentAgentflow_0-output-agentAgentflow-humanInputAgentflow_0-humanInputAgentflow_0" - }, - { - "source": "humanInputAgentflow_0", - "sourceHandle": "humanInputAgentflow_0-output-0", - "target": "directReplyAgentflow_0", - "targetHandle": "directReplyAgentflow_0", - "data": { - "sourceColor": "#6E6EFD", - "targetColor": "#4DDBBB", - "edgeLabel": "proceed", - "isHumanInput": true - }, - "type": "agentFlow", - "id": "humanInputAgentflow_0-humanInputAgentflow_0-output-0-directReplyAgentflow_0-directReplyAgentflow_0" - }, - { - "source": "humanInputAgentflow_0", - "sourceHandle": "humanInputAgentflow_0-output-1", - "target": "loopAgentflow_0", - "targetHandle": "loopAgentflow_0", - "data": { - "sourceColor": "#6E6EFD", - "targetColor": "#FFA07A", - "edgeLabel": "reject", - "isHumanInput": true - }, - "type": "agentFlow", - "id": "humanInputAgentflow_0-humanInputAgentflow_0-output-1-loopAgentflow_0-loopAgentflow_0" } ] } diff --git a/packages/server/marketplaces/agentflowsv2/Structured Output.json b/packages/server/marketplaces/agentflowsv2/Structured Output.json new file mode 100644 index 000000000..d6de0e7c1 --- /dev/null +++ b/packages/server/marketplaces/agentflowsv2/Structured Output.json @@ -0,0 +1,549 @@ +{ + "description": "Return structured output from LLM", + "usecases": ["Extraction"], + "nodes": [ + { + "id": "startAgentflow_0", + "type": "agentFlow", + "position": { + "x": 64, + "y": 98.5 + }, + "data": { + "id": "startAgentflow_0", + "label": "Start", + "version": 1.1, + "name": "startAgentflow", + "type": "Start", + "color": "#7EE787", + "hideInput": true, + "baseClasses": ["Start"], + "category": "Agent Flows", + "description": "Starting point of the agentflow", + "inputParams": [ + { + "label": "Input Type", + "name": "startInputType", + "type": "options", + "options": [ + { + "label": "Chat Input", + "name": "chatInput", + "description": "Start the conversation with chat input" + }, + { + "label": "Form Input", + "name": "formInput", + 
"description": "Start the workflow with form inputs" + } + ], + "default": "chatInput", + "id": "startAgentflow_0-input-startInputType-options", + "display": true + }, + { + "label": "Form Title", + "name": "formTitle", + "type": "string", + "placeholder": "Please Fill Out The Form", + "show": { + "startInputType": "formInput" + }, + "id": "startAgentflow_0-input-formTitle-string", + "display": false + }, + { + "label": "Form Description", + "name": "formDescription", + "type": "string", + "placeholder": "Complete all fields below to continue", + "show": { + "startInputType": "formInput" + }, + "id": "startAgentflow_0-input-formDescription-string", + "display": false + }, + { + "label": "Form Input Types", + "name": "formInputTypes", + "description": "Specify the type of form input", + "type": "array", + "show": { + "startInputType": "formInput" + }, + "array": [ + { + "label": "Type", + "name": "type", + "type": "options", + "options": [ + { + "label": "String", + "name": "string" + }, + { + "label": "Number", + "name": "number" + }, + { + "label": "Boolean", + "name": "boolean" + }, + { + "label": "Options", + "name": "options" + } + ], + "default": "string" + }, + { + "label": "Label", + "name": "label", + "type": "string", + "placeholder": "Label for the input" + }, + { + "label": "Variable Name", + "name": "name", + "type": "string", + "placeholder": "Variable name for the input (must be camel case)", + "description": "Variable name must be camel case. For example: firstName, lastName, etc." 
+ }, + { + "label": "Add Options", + "name": "addOptions", + "type": "array", + "show": { + "formInputTypes[$index].type": "options" + }, + "array": [ + { + "label": "Option", + "name": "option", + "type": "string" + } + ] + } + ], + "id": "startAgentflow_0-input-formInputTypes-array", + "display": false + }, + { + "label": "Ephemeral Memory", + "name": "startEphemeralMemory", + "type": "boolean", + "description": "Start fresh for every execution without past chat history", + "optional": true, + "id": "startAgentflow_0-input-startEphemeralMemory-boolean", + "display": true + }, + { + "label": "Flow State", + "name": "startState", + "description": "Runtime state during the execution of the workflow", + "type": "array", + "optional": true, + "array": [ + { + "label": "Key", + "name": "key", + "type": "string", + "placeholder": "Foo" + }, + { + "label": "Value", + "name": "value", + "type": "string", + "placeholder": "Bar", + "optional": true + } + ], + "id": "startAgentflow_0-input-startState-array", + "display": true + }, + { + "label": "Persist State", + "name": "startPersistState", + "type": "boolean", + "description": "Persist the state in the same session", + "optional": true, + "id": "startAgentflow_0-input-startPersistState-boolean", + "display": true + } + ], + "inputAnchors": [], + "inputs": { + "startInputType": "chatInput", + "formTitle": "", + "formDescription": "", + "formInputTypes": "", + "startEphemeralMemory": "", + "startState": "", + "startPersistState": "" + }, + "outputAnchors": [ + { + "id": "startAgentflow_0-output-startAgentflow", + "label": "Start", + "name": "startAgentflow" + } + ], + "outputs": {}, + "selected": false + }, + "width": 103, + "height": 66, + "positionAbsolute": { + "x": 64, + "y": 98.5 + }, + "selected": false, + "dragging": false + }, + { + "id": "llmAgentflow_0", + "position": { + "x": 234.5, + "y": 95.75 + }, + "data": { + "id": "llmAgentflow_0", + "label": "Strutured Output", + "version": 1, + "name": "llmAgentflow", + 
"type": "LLM", + "color": "#64B5F6", + "baseClasses": ["LLM"], + "category": "Agent Flows", + "description": "Large language models to analyze user-provided inputs and generate responses", + "inputParams": [ + { + "label": "Model", + "name": "llmModel", + "type": "asyncOptions", + "loadMethod": "listModels", + "loadConfig": true, + "id": "llmAgentflow_0-input-llmModel-asyncOptions", + "display": true + }, + { + "label": "Messages", + "name": "llmMessages", + "type": "array", + "optional": true, + "acceptVariable": true, + "array": [ + { + "label": "Role", + "name": "role", + "type": "options", + "options": [ + { + "label": "System", + "name": "system" + }, + { + "label": "Assistant", + "name": "assistant" + }, + { + "label": "Developer", + "name": "developer" + }, + { + "label": "User", + "name": "user" + } + ] + }, + { + "label": "Content", + "name": "content", + "type": "string", + "acceptVariable": true, + "generateInstruction": true, + "rows": 4 + } + ], + "id": "llmAgentflow_0-input-llmMessages-array", + "display": true + }, + { + "label": "Enable Memory", + "name": "llmEnableMemory", + "type": "boolean", + "description": "Enable memory for the conversation thread", + "default": true, + "optional": true, + "id": "llmAgentflow_0-input-llmEnableMemory-boolean", + "display": true + }, + { + "label": "Memory Type", + "name": "llmMemoryType", + "type": "options", + "options": [ + { + "label": "All Messages", + "name": "allMessages", + "description": "Retrieve all messages from the conversation" + }, + { + "label": "Window Size", + "name": "windowSize", + "description": "Uses a fixed window size to surface the last N messages" + }, + { + "label": "Conversation Summary", + "name": "conversationSummary", + "description": "Summarizes the whole conversation" + }, + { + "label": "Conversation Summary Buffer", + "name": "conversationSummaryBuffer", + "description": "Summarize conversations once token limit is reached. 
Default to 2000" + } + ], + "optional": true, + "default": "allMessages", + "show": { + "llmEnableMemory": true + }, + "id": "llmAgentflow_0-input-llmMemoryType-options", + "display": false + }, + { + "label": "Window Size", + "name": "llmMemoryWindowSize", + "type": "number", + "default": "20", + "description": "Uses a fixed window size to surface the last N messages", + "show": { + "llmMemoryType": "windowSize" + }, + "id": "llmAgentflow_0-input-llmMemoryWindowSize-number", + "display": false + }, + { + "label": "Max Token Limit", + "name": "llmMemoryMaxTokenLimit", + "type": "number", + "default": "2000", + "description": "Summarize conversations once token limit is reached. Default to 2000", + "show": { + "llmMemoryType": "conversationSummaryBuffer" + }, + "id": "llmAgentflow_0-input-llmMemoryMaxTokenLimit-number", + "display": false + }, + { + "label": "Input Message", + "name": "llmUserMessage", + "type": "string", + "description": "Add an input message as user message at the end of the conversation", + "rows": 4, + "optional": true, + "acceptVariable": true, + "show": { + "llmEnableMemory": true + }, + "id": "llmAgentflow_0-input-llmUserMessage-string", + "display": false + }, + { + "label": "Return Response As", + "name": "llmReturnResponseAs", + "type": "options", + "options": [ + { + "label": "User Message", + "name": "userMessage" + }, + { + "label": "Assistant Message", + "name": "assistantMessage" + } + ], + "default": "userMessage", + "id": "llmAgentflow_0-input-llmReturnResponseAs-options", + "display": true + }, + { + "label": "JSON Structured Output", + "name": "llmStructuredOutput", + "description": "Instruct the LLM to give output in a JSON structured schema", + "type": "array", + "optional": true, + "acceptVariable": true, + "array": [ + { + "label": "Key", + "name": "key", + "type": "string" + }, + { + "label": "Type", + "name": "type", + "type": "options", + "options": [ + { + "label": "String", + "name": "string" + }, + { + "label": "String 
Array", + "name": "stringArray" + }, + { + "label": "Number", + "name": "number" + }, + { + "label": "Boolean", + "name": "boolean" + }, + { + "label": "Enum", + "name": "enum" + }, + { + "label": "JSON Array", + "name": "jsonArray" + } + ] + }, + { + "label": "Enum Values", + "name": "enumValues", + "type": "string", + "placeholder": "value1, value2, value3", + "description": "Enum values. Separated by comma", + "optional": true, + "show": { + "llmStructuredOutput[$index].type": "enum" + } + }, + { + "label": "JSON Schema", + "name": "jsonSchema", + "type": "code", + "placeholder": "{\n \"answer\": {\n \"type\": \"string\",\n \"description\": \"Value of the answer\"\n },\n \"reason\": {\n \"type\": \"string\",\n \"description\": \"Reason for the answer\"\n },\n \"optional\": {\n \"type\": \"boolean\"\n },\n \"count\": {\n \"type\": \"number\"\n },\n \"children\": {\n \"type\": \"array\",\n \"items\": {\n \"type\": \"object\",\n \"properties\": {\n \"value\": {\n \"type\": \"string\",\n \"description\": \"Value of the children's answer\"\n }\n }\n }\n }\n}", + "description": "JSON schema for the structured output", + "optional": true, + "show": { + "llmStructuredOutput[$index].type": "jsonArray" + } + }, + { + "label": "Description", + "name": "description", + "type": "string", + "placeholder": "Description of the key" + } + ], + "id": "llmAgentflow_0-input-llmStructuredOutput-array", + "display": true + }, + { + "label": "Update Flow State", + "name": "llmUpdateState", + "description": "Update runtime state during the execution of the workflow", + "type": "array", + "optional": true, + "acceptVariable": true, + "array": [ + { + "label": "Key", + "name": "key", + "type": "asyncOptions", + "loadMethod": "listRuntimeStateKeys", + "freeSolo": true + }, + { + "label": "Value", + "name": "value", + "type": "string", + "acceptVariable": true, + "acceptNodeOutputAsVariable": true + } + ], + "id": "llmAgentflow_0-input-llmUpdateState-array", + "display": true + } + ], + 
"inputAnchors": [], + "inputs": { + "llmModel": "chatAnthropic", + "llmMessages": [ + { + "role": "system", + "content": "

Given a user query, return the result only in JSON format, without exception.

When asked to self-correct, output only the corrected JSON and no other text.

" + }, + { + "role": "user", + "content": "

{{ question }}

" + } + ], + "llmEnableMemory": false, + "llmReturnResponseAs": "userMessage", + "llmStructuredOutput": [ + { + "key": "output", + "type": "jsonArray", + "enumValues": "", + "jsonSchema": "{\n \"answer\": {\n \"type\": \"string\",\n \"description\": \"Value of the answer\"\n },\n \"reason\": {\n \"type\": \"string\",\n \"description\": \"Reason for the answer\"\n }\n}", + "description": "answer and its reason to the question" + } + ], + "llmUpdateState": "", + "llmModelConfig": { + "credential": "", + "modelName": "claude-sonnet-4-0", + "temperature": 0.9, + "streaming": true, + "maxTokensToSample": "", + "topP": "", + "topK": "", + "extendedThinking": "", + "budgetTokens": 1024, + "allowImageUploads": "", + "llmModel": "chatAnthropic" + } + }, + "outputAnchors": [ + { + "id": "llmAgentflow_0-output-llmAgentflow", + "label": "LLM", + "name": "llmAgentflow" + } + ], + "outputs": {}, + "selected": false + }, + "type": "agentFlow", + "width": 213, + "height": 72, + "selected": false, + "positionAbsolute": { + "x": 234.5, + "y": 95.75 + }, + "dragging": false + } + ], + "edges": [ + { + "source": "startAgentflow_0", + "sourceHandle": "startAgentflow_0-output-startAgentflow", + "target": "llmAgentflow_0", + "targetHandle": "llmAgentflow_0", + "data": { + "sourceColor": "#7EE787", + "targetColor": "#64B5F6", + "isHumanInput": false + }, + "type": "agentFlow", + "id": "startAgentflow_0-startAgentflow_0-output-startAgentflow-llmAgentflow_0-llmAgentflow_0" + } + ] +} diff --git a/packages/server/marketplaces/agentflowsv2/Supervisor Worker.json b/packages/server/marketplaces/agentflowsv2/Supervisor Worker.json index dbf60b335..1c8d86c9b 100644 --- a/packages/server/marketplaces/agentflowsv2/Supervisor Worker.json +++ b/packages/server/marketplaces/agentflowsv2/Supervisor Worker.json @@ -6,13 +6,13 @@ "id": "startAgentflow_0", "type": "agentFlow", "position": { - "x": -234.25083179589828, - "y": 89.8928676312403 + "x": -198.4357561998925, + "y": 90.62378754136287 }, "data": { 
"id": "startAgentflow_0", "label": "Start", - "version": 1, + "version": 1.1, "name": "startAgentflow", "type": "Start", "color": "#7EE787", @@ -157,6 +157,15 @@ ], "id": "startAgentflow_0-input-startState-array", "display": true + }, + { + "label": "Persist State", + "name": "startPersistState", + "type": "boolean", + "description": "Persist the state in the same session", + "optional": true, + "id": "startAgentflow_0-input-startPersistState-boolean", + "display": true } ], "inputAnchors": [], @@ -186,12 +195,12 @@ "outputs": {}, "selected": false }, - "width": 101, - "height": 65, + "width": 103, + "height": 66, "selected": false, "positionAbsolute": { - "x": -234.25083179589828, - "y": 89.8928676312403 + "x": -198.4357561998925, + "y": 90.62378754136287 }, "dragging": false }, @@ -474,7 +483,7 @@ "selected": false }, "type": "agentFlow", - "width": 184, + "width": 194, "height": 100, "selected": false, "positionAbsolute": { @@ -778,11 +787,11 @@ ], "inputAnchors": [], "inputs": { - "agentModel": "azureChatOpenAI", + "agentModel": "chatOpenAI", "agentMessages": [ { "role": "system", - "content": "

As a Senior Software Engineer, you are a pivotal part of our innovative development team. Your expertise and leadership drive the creation of robust, scalable software solutions that meet the needs of our diverse clientele. By applying best practices in software development, you ensure that our products are reliable, efficient, and maintainable.

Your goal is to lead the development of high-quality software solutions.

Utilize your deep technical knowledge and experience to architect, design, and implement software systems that address complex problems. Collaborate closely with other engineers, reviewers to ensure that the solutions you develop align with business objectives and user needs.

Design and implement new feature for the given task, ensuring it integrates seamlessly with existing systems and meets performance requirements. Use your understanding of {technology} to build this feature. Make sure to adhere to our coding standards and follow best practices.

The output should be a fully functional, well-documented feature that enhances our product's capabilities. Include detailed comments in the code. Pass the code to Quality Assurance Engineer for review if neccessary. Once ther review is good enough, produce a finalized version of the code.

" + "content": "

As a Senior Software Engineer, you are a pivotal part of our innovative development team. Your expertise and leadership drive the creation of robust, scalable software solutions that meet the needs of our diverse clientele. By applying best practices in software development, you ensure that our products are reliable, efficient, and maintainable.

Your goal is to lead the development of high-quality software solutions.

Utilize your deep technical knowledge and experience to architect, design, and implement software systems that address complex problems. Collaborate closely with other engineers and reviewers to ensure that the solutions you develop align with business objectives and user needs.

Design and implement a new feature for the given task, ensuring it integrates seamlessly with existing systems and meets performance requirements. Use your understanding of React, Tailwindcss, NodeJS to build this feature. Make sure to adhere to our coding standards and follow best practices.

The output should be a fully functional, well-documented feature that enhances our product's capabilities. Include detailed comments in the code.

" } ], "agentTools": "", @@ -794,20 +803,23 @@ "agentUpdateState": "", "agentModelConfig": { "credential": "", - "modelName": "gpt-4.1", + "modelName": "gpt-4o-mini", "temperature": 0.9, - "maxTokens": "", "streaming": true, + "maxTokens": "", "topP": "", "frequencyPenalty": "", "presencePenalty": "", "timeout": "", + "strictToolCalling": "", + "stopSequence": "", "basepath": "", + "proxyUrl": "", "baseOptions": "", "allowImageUploads": "", "imageResolution": "low", - "reasoningEffort": "medium", - "agentModel": "azureChatOpenAI" + "reasoningEffort": "", + "agentModel": "chatOpenAI" } }, "outputAnchors": [ @@ -821,8 +833,8 @@ "selected": false }, "type": "agentFlow", - "width": 183, - "height": 71, + "width": 191, + "height": 72, "selected": false, "positionAbsolute": { "x": 352.5679347768288, @@ -833,8 +845,8 @@ { "id": "agentAgentflow_2", "position": { - "x": 358.5981605238689, - "y": 87.38558154725587 + "x": 359.32908043399146, + "y": 88.11650145737843 }, "data": { "id": "agentAgentflow_2", @@ -1125,11 +1137,11 @@ ], "inputAnchors": [], "inputs": { - "agentModel": "chatDeepseek", + "agentModel": "chatOpenAI", "agentMessages": [ { "role": "system", - "content": "

As a Quality Assurance Engineer, you are an integral part of our development team, ensuring that our software products are of the highest quality. Your meticulous attention to detail and expertise in testing methodologies are crucial in identifying defects and ensuring that our code meets the highest standards.

Your goal is to ensure the delivery of high-quality software through thorough code review and testing.

Review the codebase for the new feature designed and implemented by the Senior Software Engineer. Your expertise goes beyond mere code inspection; you are adept at ensuring that developments not only function as intended but also adhere to the team's coding standards, enhance maintainability, and seamlessly integrate with existing systems.

With a deep appreciation for collaborative development, you provide constructive feedback, guiding contributors towards best practices and fostering a culture of continuous improvement. Your meticulous approach to reviewing code, coupled with your ability to foresee potential issues and recommend proactive solutions, ensures the delivery of high-quality software that is robust, scalable, and aligned with the team's strategic goals.

Always pass back the review and feedback to Senior Software Engineer.

" + "content": "

As a Quality Assurance Engineer, you are an integral part of our development team, ensuring that our software products are of the highest quality. Your meticulous attention to detail and expertise in testing methodologies are crucial in identifying defects and ensuring that our code meets the highest standards.

Your goal is to ensure the delivery of high-quality software through thorough code review and testing.

Review the codebase for the new feature designed and implemented by the Senior Software Engineer. Your expertise goes beyond mere code inspection; you are adept at ensuring that developments not only function as intended but also adhere to the team's coding standards, enhance maintainability, and seamlessly integrate with existing systems.

With a deep appreciation for collaborative development, you provide constructive feedback, guiding contributors towards best practices and fostering a culture of continuous improvement. Your meticulous approach to reviewing code, coupled with your ability to foresee potential issues and recommend proactive solutions, ensures the delivery of high-quality software that is robust, scalable, and aligned with the team's strategic goals.

" } ], "agentTools": "", @@ -1141,17 +1153,23 @@ "agentUpdateState": "", "agentModelConfig": { "credential": "", - "modelName": "deepseek-reasoner", - "temperature": 0.7, + "modelName": "gpt-4o-mini", + "temperature": 0.9, "streaming": true, "maxTokens": "", "topP": "", "frequencyPenalty": "", "presencePenalty": "", "timeout": "", + "strictToolCalling": "", "stopSequence": "", + "basepath": "", + "proxyUrl": "", "baseOptions": "", - "agentModel": "chatDeepseek" + "allowImageUploads": "", + "imageResolution": "low", + "reasoningEffort": "", + "agentModel": "chatOpenAI" } }, "outputAnchors": [ @@ -1165,12 +1183,12 @@ "selected": false }, "type": "agentFlow", - "width": 206, - "height": 71, + "width": 175, + "height": 72, "selected": false, "positionAbsolute": { - "x": 358.5981605238689, - "y": 87.38558154725587 + "x": 359.32908043399146, + "y": 88.11650145737843 }, "dragging": false }, @@ -1469,27 +1487,29 @@ ], "inputAnchors": [], "inputs": { - "agentModel": "chatAnthropic", + "agentModel": "chatGoogleGenerativeAI", "agentMessages": "", "agentTools": "", "agentKnowledgeDocumentStores": "", "agentEnableMemory": true, "agentMemoryType": "allMessages", - "agentUserMessage": "

Given the above conversations, generate a detail solution developed by the software engineer and code reviewer. Include full code, improvements and review.

", + "agentUserMessage": "

Given the above conversations, generate a detailed solution developed by the software engineer and code reviewer.

Your guiding principles:

  1. Preserve Full Context
    Include all code implementations, improvements and review from the conversation. Do not omit, summarize, or oversimplify key information.

  2. Markdown Output Only
    Your final output must be in Markdown format.

", "agentReturnResponseAs": "userMessage", "agentUpdateState": "", "agentModelConfig": { "credential": "", - "modelName": "claude-3-7-sonnet-latest", + "modelName": "gemini-2.5-flash-preview-05-20", + "customModelName": "", "temperature": 0.9, "streaming": true, - "maxTokensToSample": "", + "maxOutputTokens": "", "topP": "", "topK": "", - "extendedThinking": "", - "budgetTokens": 1024, + "harmCategory": "", + "harmBlockThreshold": "", + "baseUrl": "", "allowImageUploads": "", - "agentModel": "chatAnthropic" + "agentModel": "chatGoogleGenerativeAI" } }, "outputAnchors": [ @@ -1503,8 +1523,8 @@ "selected": false }, "type": "agentFlow", - "width": 231, - "height": 71, + "width": 283, + "height": 72, "selected": false, "positionAbsolute": { "x": 357.60470406099364, @@ -1515,8 +1535,8 @@ { "id": "loopAgentflow_0", "position": { - "x": 574.050701666824, - "y": -20.0960840521807 + "x": 572.5888618465789, + "y": -20.827003962303266 }, "data": { "id": "loopAgentflow_0", @@ -1558,20 +1578,20 @@ "selected": false }, "type": "agentFlow", - "width": 186, - "height": 65, + "width": 195, + "height": 66, "selected": false, "dragging": false, "positionAbsolute": { - "x": 574.050701666824, - "y": -20.0960840521807 + "x": 572.5888618465789, + "y": -20.827003962303266 } }, { "id": "loopAgentflow_1", "position": { - "x": 600.379151793432, - "y": 90.25732743474846 + "x": 566.7568359277939, + "y": 90.98824734487103 }, "data": { "id": "loopAgentflow_1", @@ -1613,20 +1633,20 @@ "selected": false }, "type": "agentFlow", - "width": 186, - "height": 65, + "width": 195, + "height": 66, "selected": false, "dragging": false, "positionAbsolute": { - "x": 600.379151793432, - "y": 90.25732743474846 + "x": 566.7568359277939, + "y": 90.98824734487103 } }, { "id": "llmAgentflow_0", "position": { - "x": -78.28788541792727, - "y": 87.1528514813091 + "x": -60.01488766486309, + "y": 87.88377139143167 }, "data": { "id": "llmAgentflow_0", @@ -1941,7 +1961,7 @@ ], "llmModelConfig": { "cache": "", - 
"modelName": "gpt-4o-mini", + "modelName": "gpt-4.1", "temperature": 0.9, "streaming": true, "maxTokens": "", @@ -1955,8 +1975,7 @@ "proxyUrl": "", "baseOptions": "", "allowImageUploads": "", - "imageResolution": "low", - "reasoningEffort": "medium", + "reasoningEffort": "", "llmModel": "chatOpenAI" } }, @@ -1971,12 +1990,12 @@ "selected": false }, "type": "agentFlow", - "width": 168, - "height": 71, + "width": 148, + "height": 72, "selected": false, "positionAbsolute": { - "x": -78.28788541792727, - "y": 87.1528514813091 + "x": -60.01488766486309, + "y": 87.88377139143167 }, "dragging": false } diff --git a/packages/server/marketplaces/agentflowsv2/Translator.json b/packages/server/marketplaces/agentflowsv2/Translator.json new file mode 100644 index 000000000..6d29804e8 --- /dev/null +++ b/packages/server/marketplaces/agentflowsv2/Translator.json @@ -0,0 +1,544 @@ +{ + "description": "Translate text from one language to another", + "usecases": ["Basic"], + "nodes": [ + { + "id": "startAgentflow_0", + "type": "agentFlow", + "position": { + "x": 64, + "y": 98.5 + }, + "data": { + "id": "startAgentflow_0", + "label": "Start", + "version": 1.1, + "name": "startAgentflow", + "type": "Start", + "color": "#7EE787", + "hideInput": true, + "baseClasses": ["Start"], + "category": "Agent Flows", + "description": "Starting point of the agentflow", + "inputParams": [ + { + "label": "Input Type", + "name": "startInputType", + "type": "options", + "options": [ + { + "label": "Chat Input", + "name": "chatInput", + "description": "Start the conversation with chat input" + }, + { + "label": "Form Input", + "name": "formInput", + "description": "Start the workflow with form inputs" + } + ], + "default": "chatInput", + "id": "startAgentflow_0-input-startInputType-options", + "display": true + }, + { + "label": "Form Title", + "name": "formTitle", + "type": "string", + "placeholder": "Please Fill Out The Form", + "show": { + "startInputType": "formInput" + }, + "id": 
"startAgentflow_0-input-formTitle-string", + "display": false + }, + { + "label": "Form Description", + "name": "formDescription", + "type": "string", + "placeholder": "Complete all fields below to continue", + "show": { + "startInputType": "formInput" + }, + "id": "startAgentflow_0-input-formDescription-string", + "display": false + }, + { + "label": "Form Input Types", + "name": "formInputTypes", + "description": "Specify the type of form input", + "type": "array", + "show": { + "startInputType": "formInput" + }, + "array": [ + { + "label": "Type", + "name": "type", + "type": "options", + "options": [ + { + "label": "String", + "name": "string" + }, + { + "label": "Number", + "name": "number" + }, + { + "label": "Boolean", + "name": "boolean" + }, + { + "label": "Options", + "name": "options" + } + ], + "default": "string" + }, + { + "label": "Label", + "name": "label", + "type": "string", + "placeholder": "Label for the input" + }, + { + "label": "Variable Name", + "name": "name", + "type": "string", + "placeholder": "Variable name for the input (must be camel case)", + "description": "Variable name must be camel case. For example: firstName, lastName, etc." 
+ }, + { + "label": "Add Options", + "name": "addOptions", + "type": "array", + "show": { + "formInputTypes[$index].type": "options" + }, + "array": [ + { + "label": "Option", + "name": "option", + "type": "string" + } + ] + } + ], + "id": "startAgentflow_0-input-formInputTypes-array", + "display": false + }, + { + "label": "Ephemeral Memory", + "name": "startEphemeralMemory", + "type": "boolean", + "description": "Start fresh for every execution without past chat history", + "optional": true, + "id": "startAgentflow_0-input-startEphemeralMemory-boolean", + "display": true + }, + { + "label": "Flow State", + "name": "startState", + "description": "Runtime state during the execution of the workflow", + "type": "array", + "optional": true, + "array": [ + { + "label": "Key", + "name": "key", + "type": "string", + "placeholder": "Foo" + }, + { + "label": "Value", + "name": "value", + "type": "string", + "placeholder": "Bar", + "optional": true + } + ], + "id": "startAgentflow_0-input-startState-array", + "display": true + }, + { + "label": "Persist State", + "name": "startPersistState", + "type": "boolean", + "description": "Persist the state in the same session", + "optional": true, + "id": "startAgentflow_0-input-startPersistState-boolean", + "display": true + } + ], + "inputAnchors": [], + "inputs": { + "startInputType": "chatInput", + "formTitle": "", + "formDescription": "", + "formInputTypes": "", + "startEphemeralMemory": "", + "startState": "", + "startPersistState": "" + }, + "outputAnchors": [ + { + "id": "startAgentflow_0-output-startAgentflow", + "label": "Start", + "name": "startAgentflow" + } + ], + "outputs": {}, + "selected": false + }, + "width": 103, + "height": 66, + "positionAbsolute": { + "x": 64, + "y": 98.5 + }, + "selected": false, + "dragging": false + }, + { + "id": "llmAgentflow_0", + "position": { + "x": 234.5, + "y": 96.25 + }, + "data": { + "id": "llmAgentflow_0", + "label": "Translator", + "version": 1, + "name": "llmAgentflow", + "type": 
"LLM", + "color": "#64B5F6", + "baseClasses": ["LLM"], + "category": "Agent Flows", + "description": "Large language models to analyze user-provided inputs and generate responses", + "inputParams": [ + { + "label": "Model", + "name": "llmModel", + "type": "asyncOptions", + "loadMethod": "listModels", + "loadConfig": true, + "id": "llmAgentflow_0-input-llmModel-asyncOptions", + "display": true + }, + { + "label": "Messages", + "name": "llmMessages", + "type": "array", + "optional": true, + "acceptVariable": true, + "array": [ + { + "label": "Role", + "name": "role", + "type": "options", + "options": [ + { + "label": "System", + "name": "system" + }, + { + "label": "Assistant", + "name": "assistant" + }, + { + "label": "Developer", + "name": "developer" + }, + { + "label": "User", + "name": "user" + } + ] + }, + { + "label": "Content", + "name": "content", + "type": "string", + "acceptVariable": true, + "generateInstruction": true, + "rows": 4 + } + ], + "id": "llmAgentflow_0-input-llmMessages-array", + "display": true + }, + { + "label": "Enable Memory", + "name": "llmEnableMemory", + "type": "boolean", + "description": "Enable memory for the conversation thread", + "default": true, + "optional": true, + "id": "llmAgentflow_0-input-llmEnableMemory-boolean", + "display": true + }, + { + "label": "Memory Type", + "name": "llmMemoryType", + "type": "options", + "options": [ + { + "label": "All Messages", + "name": "allMessages", + "description": "Retrieve all messages from the conversation" + }, + { + "label": "Window Size", + "name": "windowSize", + "description": "Uses a fixed window size to surface the last N messages" + }, + { + "label": "Conversation Summary", + "name": "conversationSummary", + "description": "Summarizes the whole conversation" + }, + { + "label": "Conversation Summary Buffer", + "name": "conversationSummaryBuffer", + "description": "Summarize conversations once token limit is reached. 
Default to 2000" + } + ], + "optional": true, + "default": "allMessages", + "show": { + "llmEnableMemory": true + }, + "id": "llmAgentflow_0-input-llmMemoryType-options", + "display": false + }, + { + "label": "Window Size", + "name": "llmMemoryWindowSize", + "type": "number", + "default": "20", + "description": "Uses a fixed window size to surface the last N messages", + "show": { + "llmMemoryType": "windowSize" + }, + "id": "llmAgentflow_0-input-llmMemoryWindowSize-number", + "display": false + }, + { + "label": "Max Token Limit", + "name": "llmMemoryMaxTokenLimit", + "type": "number", + "default": "2000", + "description": "Summarize conversations once token limit is reached. Default to 2000", + "show": { + "llmMemoryType": "conversationSummaryBuffer" + }, + "id": "llmAgentflow_0-input-llmMemoryMaxTokenLimit-number", + "display": false + }, + { + "label": "Input Message", + "name": "llmUserMessage", + "type": "string", + "description": "Add an input message as user message at the end of the conversation", + "rows": 4, + "optional": true, + "acceptVariable": true, + "show": { + "llmEnableMemory": true + }, + "id": "llmAgentflow_0-input-llmUserMessage-string", + "display": false + }, + { + "label": "Return Response As", + "name": "llmReturnResponseAs", + "type": "options", + "options": [ + { + "label": "User Message", + "name": "userMessage" + }, + { + "label": "Assistant Message", + "name": "assistantMessage" + } + ], + "default": "userMessage", + "id": "llmAgentflow_0-input-llmReturnResponseAs-options", + "display": true + }, + { + "label": "JSON Structured Output", + "name": "llmStructuredOutput", + "description": "Instruct the LLM to give output in a JSON structured schema", + "type": "array", + "optional": true, + "acceptVariable": true, + "array": [ + { + "label": "Key", + "name": "key", + "type": "string" + }, + { + "label": "Type", + "name": "type", + "type": "options", + "options": [ + { + "label": "String", + "name": "string" + }, + { + "label": "String 
Array", + "name": "stringArray" + }, + { + "label": "Number", + "name": "number" + }, + { + "label": "Boolean", + "name": "boolean" + }, + { + "label": "Enum", + "name": "enum" + }, + { + "label": "JSON Array", + "name": "jsonArray" + } + ] + }, + { + "label": "Enum Values", + "name": "enumValues", + "type": "string", + "placeholder": "value1, value2, value3", + "description": "Enum values. Separated by comma", + "optional": true, + "show": { + "llmStructuredOutput[$index].type": "enum" + } + }, + { + "label": "JSON Schema", + "name": "jsonSchema", + "type": "code", + "placeholder": "{\n \"answer\": {\n \"type\": \"string\",\n \"description\": \"Value of the answer\"\n },\n \"reason\": {\n \"type\": \"string\",\n \"description\": \"Reason for the answer\"\n },\n \"optional\": {\n \"type\": \"boolean\"\n },\n \"count\": {\n \"type\": \"number\"\n },\n \"children\": {\n \"type\": \"array\",\n \"items\": {\n \"type\": \"object\",\n \"properties\": {\n \"value\": {\n \"type\": \"string\",\n \"description\": \"Value of the children's answer\"\n }\n }\n }\n }\n}", + "description": "JSON schema for the structured output", + "optional": true, + "show": { + "llmStructuredOutput[$index].type": "jsonArray" + } + }, + { + "label": "Description", + "name": "description", + "type": "string", + "placeholder": "Description of the key" + } + ], + "id": "llmAgentflow_0-input-llmStructuredOutput-array", + "display": true + }, + { + "label": "Update Flow State", + "name": "llmUpdateState", + "description": "Update runtime state during the execution of the workflow", + "type": "array", + "optional": true, + "acceptVariable": true, + "array": [ + { + "label": "Key", + "name": "key", + "type": "asyncOptions", + "loadMethod": "listRuntimeStateKeys", + "freeSolo": true + }, + { + "label": "Value", + "name": "value", + "type": "string", + "acceptVariable": true, + "acceptNodeOutputAsVariable": true + } + ], + "id": "llmAgentflow_0-input-llmUpdateState-array", + "display": true + } + ], + 
"inputAnchors": [], + "inputs": { + "llmModel": "chatGoogleGenerativeAI", + "llmMessages": [ + { + "role": "system", + "content": "

You are a helpful assistant that translates English to the Japanese language. Return only the Japanese translation

" + }, + { + "role": "user", + "content": "

English:

{{ question }}

Japanese:

" + } + ], + "llmEnableMemory": false, + "llmReturnResponseAs": "userMessage", + "llmStructuredOutput": "", + "llmUpdateState": "", + "llmModelConfig": { + "cache": "", + "contextCache": "", + "modelName": "gemini-2.0-flash", + "customModelName": "", + "temperature": 0.9, + "streaming": true, + "maxOutputTokens": "", + "topP": "", + "topK": "", + "harmCategory": "", + "harmBlockThreshold": "", + "baseUrl": "", + "allowImageUploads": "", + "llmModel": "chatGoogleGenerativeAI" + } + }, + "outputAnchors": [ + { + "id": "llmAgentflow_0-output-llmAgentflow", + "label": "LLM", + "name": "llmAgentflow" + } + ], + "outputs": {}, + "selected": false + }, + "type": "agentFlow", + "width": 200, + "height": 72, + "selected": false, + "positionAbsolute": { + "x": 234.5, + "y": 96.25 + }, + "dragging": false + } + ], + "edges": [ + { + "source": "startAgentflow_0", + "sourceHandle": "startAgentflow_0-output-startAgentflow", + "target": "llmAgentflow_0", + "targetHandle": "llmAgentflow_0", + "data": { + "sourceColor": "#7EE787", + "targetColor": "#64B5F6", + "isHumanInput": false + }, + "type": "agentFlow", + "id": "startAgentflow_0-startAgentflow_0-output-startAgentflow-llmAgentflow_0-llmAgentflow_0" + } + ] +} diff --git a/packages/server/marketplaces/agentflowsv2/Slack Agent.json b/packages/server/marketplaces/agentflowsv2/Workplace Chat.json similarity index 78% rename from packages/server/marketplaces/agentflowsv2/Slack Agent.json rename to packages/server/marketplaces/agentflowsv2/Workplace Chat.json index cd30db646..4cc327021 100644 --- a/packages/server/marketplaces/agentflowsv2/Slack Agent.json +++ b/packages/server/marketplaces/agentflowsv2/Workplace Chat.json @@ -1,5 +1,5 @@ { - "description": "An agent that can post message to Slack channel", + "description": "An agent that can post AI responses to Workplace channels like Slack and Teams", "usecases": ["Agent"], "nodes": [ { @@ -12,7 +12,7 @@ "data": { "id": "startAgentflow_0", "label": "Start", - "version": 1, + 
"version": 1.1, "name": "startAgentflow", "type": "Start", "color": "#7EE787", @@ -157,6 +157,15 @@ ], "id": "startAgentflow_0-input-startState-array", "display": true + }, + { + "label": "Persist State", + "name": "startPersistState", + "type": "boolean", + "description": "Persist the state in the same session", + "optional": true, + "id": "startAgentflow_0-input-startPersistState-boolean", + "display": true } ], "inputAnchors": [], @@ -177,8 +186,8 @@ "outputs": {}, "selected": false }, - "width": 101, - "height": 65, + "width": 103, + "height": 66, "selected": false, "positionAbsolute": { "x": -192.5, @@ -484,7 +493,7 @@ "baseOptions": "", "allowImageUploads": "", "imageResolution": "low", - "reasoningEffort": "medium", + "reasoningEffort": "", "llmModel": "chatOpenAI" } }, @@ -499,8 +508,8 @@ "selected": false }, "type": "agentFlow", - "width": 168, - "height": 71, + "width": 175, + "height": 72, "selected": false, "positionAbsolute": { "x": -31.25, @@ -511,13 +520,13 @@ { "id": "toolAgentflow_0", "position": { - "x": 182.75, - "y": 64.5 + "x": 181.67112630208328, + "y": 28.357731119791666 }, "data": { "id": "toolAgentflow_0", - "label": "Slack Reply", - "version": 1, + "label": "Post to Slack", + "version": 1.1, "name": "toolAgentflow", "type": "Tool", "color": "#d4a373", @@ -527,11 +536,11 @@ "inputParams": [ { "label": "Tool", - "name": "selectedTool", + "name": "toolAgentflowSelectedTool", "type": "asyncOptions", "loadMethod": "listTools", "loadConfig": true, - "id": "toolAgentflow_0-input-selectedTool-asyncOptions", + "id": "toolAgentflow_0-input-toolAgentflowSelectedTool-asyncOptions", "display": true }, { @@ -556,7 +565,7 @@ } ], "show": { - "selectedTool": ".+" + "toolAgentflowSelectedTool": ".+" }, "id": "toolAgentflow_0-input-toolInputArgs-array", "display": true @@ -590,7 +599,7 @@ ], "inputAnchors": [], "inputs": { - "selectedTool": "slackMCP", + "toolAgentflowSelectedTool": "slackMCP", "toolInputArgs": [ { "inputArgName": "channel_id", @@ -602,9 
+611,9 @@ } ], "toolUpdateState": "", - "selectedToolConfig": { + "toolAgentflowSelectedToolConfig": { "mcpActions": "[\"slack_post_message\"]", - "selectedTool": "slackMCP" + "toolAgentflowSelectedTool": "slackMCP" } }, "outputAnchors": [ @@ -618,20 +627,20 @@ "selected": false }, "type": "agentFlow", - "width": 142, - "height": 71, + "width": 156, + "height": 68, "selected": false, "positionAbsolute": { - "x": 182.75, - "y": 64.5 + "x": 181.67112630208328, + "y": 28.357731119791666 }, "dragging": false }, { "id": "directReplyAgentflow_0", "position": { - "x": 366.75, - "y": 67.5 + "x": 373.22324218750003, + "y": 66.96056315104161 }, "data": { "id": "directReplyAgentflow_0", @@ -664,12 +673,138 @@ "selected": false }, "type": "agentFlow", - "width": 194, - "height": 65, + "width": 204, + "height": 66, "selected": false, "positionAbsolute": { - "x": 366.75, - "y": 67.5 + "x": 373.22324218750003, + "y": 66.96056315104161 + }, + "dragging": false + }, + { + "id": "toolAgentflow_1", + "position": { + "x": 177.461181640625, + "y": 108.73382161458332 + }, + "data": { + "id": "toolAgentflow_1", + "label": "Post to Teams", + "version": 1.1, + "name": "toolAgentflow", + "type": "Tool", + "color": "#d4a373", + "baseClasses": ["Tool"], + "category": "Agent Flows", + "description": "Tools allow LLM to interact with external systems", + "inputParams": [ + { + "label": "Tool", + "name": "toolAgentflowSelectedTool", + "type": "asyncOptions", + "loadMethod": "listTools", + "loadConfig": true, + "id": "toolAgentflow_1-input-toolAgentflowSelectedTool-asyncOptions", + "display": true + }, + { + "label": "Tool Input Arguments", + "name": "toolInputArgs", + "type": "array", + "acceptVariable": true, + "refresh": true, + "array": [ + { + "label": "Input Argument Name", + "name": "inputArgName", + "type": "asyncOptions", + "loadMethod": "listToolInputArgs", + "refresh": true + }, + { + "label": "Input Argument Value", + "name": "inputArgValue", + "type": "string", + "acceptVariable": 
true + } + ], + "show": { + "toolAgentflowSelectedTool": ".+" + }, + "id": "toolAgentflow_1-input-toolInputArgs-array", + "display": true + }, + { + "label": "Update Flow State", + "name": "toolUpdateState", + "description": "Update runtime state during the execution of the workflow", + "type": "array", + "optional": true, + "acceptVariable": true, + "array": [ + { + "label": "Key", + "name": "key", + "type": "asyncOptions", + "loadMethod": "listRuntimeStateKeys", + "freeSolo": true + }, + { + "label": "Value", + "name": "value", + "type": "string", + "acceptVariable": true, + "acceptNodeOutputAsVariable": true + } + ], + "id": "toolAgentflow_1-input-toolUpdateState-array", + "display": true + } + ], + "inputAnchors": [], + "inputs": { + "toolAgentflowSelectedTool": "microsoftTeams", + "toolInputArgs": [ + { + "inputArgName": "teamId", + "inputArgValue": "

<your-team-id>

" + }, + { + "inputArgName": "chatChannelId", + "inputArgValue": "

<your-channel-id>

" + }, + { + "inputArgName": "messageBody", + "inputArgValue": "

{{ llmAgentflow_0 }}

" + } + ], + "toolUpdateState": "", + "toolAgentflowSelectedToolConfig": { + "credential": "", + "teamsType": "chatMessage", + "chatMessageActions": "[\"sendMessage\"]", + "toolAgentflowSelectedTool": "microsoftTeams", + "chatChannelIdSendMessage": "ABCDEFG" + } + }, + "outputAnchors": [ + { + "id": "toolAgentflow_1-output-toolAgentflow", + "label": "Tool", + "name": "toolAgentflow" + } + ], + "outputs": {}, + "selected": false + }, + "type": "agentFlow", + "width": 163, + "height": 68, + "selected": false, + "positionAbsolute": { + "x": 177.461181640625, + "y": 108.73382161458332 }, "dragging": false } @@ -713,6 +848,32 @@ }, "type": "agentFlow", "id": "toolAgentflow_0-toolAgentflow_0-output-toolAgentflow-directReplyAgentflow_0-directReplyAgentflow_0" + }, + { + "source": "llmAgentflow_0", + "sourceHandle": "llmAgentflow_0-output-llmAgentflow", + "target": "toolAgentflow_1", + "targetHandle": "toolAgentflow_1", + "data": { + "sourceColor": "#64B5F6", + "targetColor": "#d4a373", + "isHumanInput": false + }, + "type": "agentFlow", + "id": "llmAgentflow_0-llmAgentflow_0-output-llmAgentflow-toolAgentflow_1-toolAgentflow_1" + }, + { + "source": "toolAgentflow_1", + "sourceHandle": "toolAgentflow_1-output-toolAgentflow", + "target": "directReplyAgentflow_0", + "targetHandle": "directReplyAgentflow_0", + "data": { + "sourceColor": "#d4a373", + "targetColor": "#4DDBBB", + "isHumanInput": false + }, + "type": "agentFlow", + "id": "toolAgentflow_1-toolAgentflow_1-output-toolAgentflow-directReplyAgentflow_0-directReplyAgentflow_0" } ] } diff --git a/packages/server/marketplaces/chatflows/Antonym.json b/packages/server/marketplaces/chatflows/Antonym.json deleted file mode 100644 index d32b40dcc..000000000 --- a/packages/server/marketplaces/chatflows/Antonym.json +++ /dev/null @@ -1,533 +0,0 @@ -{ - "description": "Output antonym of given user input using few-shot prompt template built with examples", - "framework": ["Langchain"], - "usecases": ["Basic"], - "nodes": [ - { - 
"width": 300, - "height": 956, - "id": "fewShotPromptTemplate_1", - "position": { - "x": 886.3229032369354, - "y": -32.18537399495787 - }, - "type": "customNode", - "data": { - "id": "fewShotPromptTemplate_1", - "label": "Few Shot Prompt Template", - "version": 1, - "name": "fewShotPromptTemplate", - "type": "FewShotPromptTemplate", - "baseClasses": ["FewShotPromptTemplate", "BaseStringPromptTemplate", "BasePromptTemplate"], - "category": "Prompts", - "description": "Prompt template you can build with examples", - "inputParams": [ - { - "label": "Examples", - "name": "examples", - "type": "string", - "rows": 4, - "placeholder": "[\n { \"word\": \"happy\", \"antonym\": \"sad\" },\n { \"word\": \"tall\", \"antonym\": \"short\" },\n]", - "id": "fewShotPromptTemplate_1-input-examples-string" - }, - { - "label": "Prefix", - "name": "prefix", - "type": "string", - "rows": 4, - "placeholder": "Give the antonym of every input", - "id": "fewShotPromptTemplate_1-input-prefix-string" - }, - { - "label": "Suffix", - "name": "suffix", - "type": "string", - "rows": 4, - "placeholder": "Word: {input}\nAntonym:", - "id": "fewShotPromptTemplate_1-input-suffix-string" - }, - { - "label": "Example Separator", - "name": "exampleSeparator", - "type": "string", - "placeholder": "\n\n", - "id": "fewShotPromptTemplate_1-input-exampleSeparator-string" - }, - { - "label": "Template Format", - "name": "templateFormat", - "type": "options", - "options": [ - { - "label": "f-string", - "name": "f-string" - }, - { - "label": "jinja-2", - "name": "jinja-2" - } - ], - "default": "f-string", - "id": "fewShotPromptTemplate_1-input-templateFormat-options" - } - ], - "inputAnchors": [ - { - "label": "Example Prompt", - "name": "examplePrompt", - "type": "PromptTemplate", - "id": "fewShotPromptTemplate_1-input-examplePrompt-PromptTemplate" - } - ], - "inputs": { - "examples": "[\n { \"word\": \"happy\", \"antonym\": \"sad\" },\n { \"word\": \"tall\", \"antonym\": \"short\" }\n]", - "examplePrompt": 
"{{promptTemplate_0.data.instance}}", - "prefix": "Give the antonym of every input", - "suffix": "Word: {input}\\nAntonym:", - "exampleSeparator": "\\n\\n", - "templateFormat": "f-string" - }, - "outputAnchors": [ - { - "id": "fewShotPromptTemplate_1-output-fewShotPromptTemplate-FewShotPromptTemplate|BaseStringPromptTemplate|BasePromptTemplate", - "name": "fewShotPromptTemplate", - "label": "FewShotPromptTemplate", - "type": "FewShotPromptTemplate | BaseStringPromptTemplate | BasePromptTemplate" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 886.3229032369354, - "y": -32.18537399495787 - }, - "dragging": false - }, - { - "width": 300, - "height": 513, - "id": "promptTemplate_0", - "position": { - "x": 540.0140796251119, - "y": -33.31673494170347 - }, - "type": "customNode", - "data": { - "id": "promptTemplate_0", - "label": "Prompt Template", - "version": 1, - "name": "promptTemplate", - "type": "PromptTemplate", - "baseClasses": ["PromptTemplate", "BaseStringPromptTemplate", "BasePromptTemplate"], - "category": "Prompts", - "description": "Schema to represent a basic prompt for an LLM", - "inputParams": [ - { - "label": "Template", - "name": "template", - "type": "string", - "rows": 4, - "placeholder": "What is a good name for a company that makes {product}?", - "id": "promptTemplate_0-input-template-string" - }, - { - "label": "Format Prompt Values", - "name": "promptValues", - "type": "json", - "optional": true, - "acceptVariable": true, - "list": true, - "id": "promptTemplate_0-input-promptValues-json" - } - ], - "inputAnchors": [], - "inputs": { - "template": "Word: {word}\\nAntonym: {antonym}\\n", - "promptValues": "" - }, - "outputAnchors": [ - { - "id": "promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate", - "name": "promptTemplate", - "label": "PromptTemplate", - "type": "PromptTemplate | BaseStringPromptTemplate | BasePromptTemplate" - } - ], - 
"outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 540.0140796251119, - "y": -33.31673494170347 - }, - "dragging": false - }, - { - "width": 300, - "height": 508, - "id": "llmChain_0", - "position": { - "x": 1609.3428158423485, - "y": 409.3763727612179 - }, - "type": "customNode", - "data": { - "id": "llmChain_0", - "label": "LLM Chain", - "version": 3, - "name": "llmChain", - "type": "LLMChain", - "baseClasses": ["LLMChain", "BaseChain", "Runnable"], - "category": "Chains", - "description": "Chain to run queries against LLMs", - "inputParams": [ - { - "label": "Chain Name", - "name": "chainName", - "type": "string", - "placeholder": "Name Your Chain", - "optional": true, - "id": "llmChain_0-input-chainName-string" - } - ], - "inputAnchors": [ - { - "label": "Language Model", - "name": "model", - "type": "BaseLanguageModel", - "id": "llmChain_0-input-model-BaseLanguageModel" - }, - { - "label": "Prompt", - "name": "prompt", - "type": "BasePromptTemplate", - "id": "llmChain_0-input-prompt-BasePromptTemplate" - }, - { - "label": "Output Parser", - "name": "outputParser", - "type": "BaseLLMOutputParser", - "optional": true, - "id": "llmChain_0-input-outputParser-BaseLLMOutputParser" - }, - { - "label": "Input Moderation", - "description": "Detect text that could generate harmful output and prevent it from being sent to the language model", - "name": "inputModeration", - "type": "Moderation", - "optional": true, - "list": true, - "id": "llmChain_0-input-inputModeration-Moderation" - } - ], - "inputs": { - "model": "{{chatOpenAI_0.data.instance}}", - "prompt": "{{fewShotPromptTemplate_1.data.instance}}", - "outputParser": "", - "chainName": "", - "inputModeration": "" - }, - "outputAnchors": [ - { - "name": "output", - "label": "Output", - "type": "options", - "options": [ - { - "id": "llmChain_0-output-llmChain-LLMChain|BaseChain|Runnable", - "name": "llmChain", - "label": "LLM Chain", - "type": "LLMChain | BaseChain | Runnable" 
- }, - { - "id": "llmChain_0-output-outputPrediction-string|json", - "name": "outputPrediction", - "label": "Output Prediction", - "type": "string | json" - } - ], - "default": "llmChain" - } - ], - "outputs": { - "output": "llmChain" - }, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 1609.3428158423485, - "y": 409.3763727612179 - }, - "dragging": false - }, - { - "id": "chatOpenAI_0", - "position": { - "x": 1220.4459070421062, - "y": -80.75004891987845 - }, - "type": "customNode", - "data": { - "id": "chatOpenAI_0", - "label": "ChatOpenAI", - "version": 6, - "name": "chatOpenAI", - "type": "ChatOpenAI", - "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel", "Runnable"], - "category": "Chat Models", - "description": "Wrapper around OpenAI large language models that use the Chat endpoint", - "inputParams": [ - { - "label": "Connect Credential", - "name": "credential", - "type": "credential", - "credentialNames": ["openAIApi"], - "id": "chatOpenAI_0-input-credential-credential" - }, - { - "label": "Model Name", - "name": "modelName", - "type": "asyncOptions", - "loadMethod": "listModels", - "default": "gpt-3.5-turbo", - "id": "chatOpenAI_0-input-modelName-asyncOptions" - }, - { - "label": "Temperature", - "name": "temperature", - "type": "number", - "step": 0.1, - "default": 0.9, - "optional": true, - "id": "chatOpenAI_0-input-temperature-number" - }, - { - "label": "Max Tokens", - "name": "maxTokens", - "type": "number", - "step": 1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-maxTokens-number" - }, - { - "label": "Top Probability", - "name": "topP", - "type": "number", - "step": 0.1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-topP-number" - }, - { - "label": "Frequency Penalty", - "name": "frequencyPenalty", - "type": "number", - "step": 0.1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-frequencyPenalty-number" - }, - { - 
"label": "Presence Penalty", - "name": "presencePenalty", - "type": "number", - "step": 0.1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-presencePenalty-number" - }, - { - "label": "Timeout", - "name": "timeout", - "type": "number", - "step": 1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-timeout-number" - }, - { - "label": "BasePath", - "name": "basepath", - "type": "string", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-basepath-string" - }, - { - "label": "BaseOptions", - "name": "baseOptions", - "type": "json", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-baseOptions-json" - }, - { - "label": "Allow Image Uploads", - "name": "allowImageUploads", - "type": "boolean", - "description": "Automatically uses gpt-4-vision-preview when image is being uploaded from chat. Only works with LLMChain, Conversation Chain, ReAct Agent, and Conversational Agent", - "default": false, - "optional": true, - "id": "chatOpenAI_0-input-allowImageUploads-boolean" - }, - { - "label": "Image Resolution", - "description": "This parameter controls the resolution in which the model views the image.", - "name": "imageResolution", - "type": "options", - "options": [ - { - "label": "Low", - "name": "low" - }, - { - "label": "High", - "name": "high" - }, - { - "label": "Auto", - "name": "auto" - } - ], - "default": "low", - "optional": false, - "additionalParams": true, - "id": "chatOpenAI_0-input-imageResolution-options" - } - ], - "inputAnchors": [ - { - "label": "Cache", - "name": "cache", - "type": "BaseCache", - "optional": true, - "id": "chatOpenAI_0-input-cache-BaseCache" - } - ], - "inputs": { - "cache": "", - "modelName": "gpt-3.5-turbo", - "temperature": 0.9, - "maxTokens": "", - "topP": "", - "frequencyPenalty": "", - "presencePenalty": "", - "timeout": "", - "basepath": "", - "baseOptions": "", - "allowImageUploads": "", - "imageResolution": "low" - }, - 
"outputAnchors": [ - { - "id": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", - "name": "chatOpenAI", - "label": "ChatOpenAI", - "description": "Wrapper around OpenAI large language models that use the Chat endpoint", - "type": "ChatOpenAI | BaseChatModel | BaseLanguageModel | Runnable" - } - ], - "outputs": {}, - "selected": false - }, - "width": 300, - "height": 670, - "selected": false, - "positionAbsolute": { - "x": 1220.4459070421062, - "y": -80.75004891987845 - }, - "dragging": false - }, - { - "id": "stickyNote_0", - "position": { - "x": 1607.723380325684, - "y": 245.15558433515412 - }, - "type": "stickyNote", - "data": { - "id": "stickyNote_0", - "label": "Sticky Note", - "version": 2, - "name": "stickyNote", - "type": "StickyNote", - "baseClasses": ["StickyNote"], - "tags": ["Utilities"], - "category": "Utilities", - "description": "Add a sticky note", - "inputParams": [ - { - "label": "", - "name": "note", - "type": "string", - "rows": 1, - "placeholder": "Type something here", - "optional": true, - "id": "stickyNote_0-input-note-string" - } - ], - "inputAnchors": [], - "inputs": { - "note": "Using few shot examples, we let LLM learns from the examples.\n\nThis template showcase how we can let LLM gives output as an antonym for given input" - }, - "outputAnchors": [ - { - "id": "stickyNote_0-output-stickyNote-StickyNote", - "name": "stickyNote", - "label": "StickyNote", - "description": "Add a sticky note", - "type": "StickyNote" - } - ], - "outputs": {}, - "selected": false - }, - "width": 300, - "height": 143, - "selected": false, - "positionAbsolute": { - "x": 1607.723380325684, - "y": 245.15558433515412 - }, - "dragging": false - } - ], - "edges": [ - { - "source": "promptTemplate_0", - "sourceHandle": "promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate", - "target": "fewShotPromptTemplate_1", - "targetHandle": 
"fewShotPromptTemplate_1-input-examplePrompt-PromptTemplate", - "type": "buttonedge", - "id": "promptTemplate_0-promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate-fewShotPromptTemplate_1-fewShotPromptTemplate_1-input-examplePrompt-PromptTemplate", - "data": { - "label": "" - } - }, - { - "source": "fewShotPromptTemplate_1", - "sourceHandle": "fewShotPromptTemplate_1-output-fewShotPromptTemplate-FewShotPromptTemplate|BaseStringPromptTemplate|BasePromptTemplate", - "target": "llmChain_0", - "targetHandle": "llmChain_0-input-prompt-BasePromptTemplate", - "type": "buttonedge", - "id": "fewShotPromptTemplate_1-fewShotPromptTemplate_1-output-fewShotPromptTemplate-FewShotPromptTemplate|BaseStringPromptTemplate|BasePromptTemplate-llmChain_0-llmChain_0-input-prompt-BasePromptTemplate", - "data": { - "label": "" - } - }, - { - "source": "chatOpenAI_0", - "sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", - "target": "llmChain_0", - "targetHandle": "llmChain_0-input-model-BaseLanguageModel", - "type": "buttonedge", - "id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable-llmChain_0-llmChain_0-input-model-BaseLanguageModel" - } - ] -} diff --git a/packages/server/marketplaces/chatflows/Context Chat Engine.json b/packages/server/marketplaces/chatflows/Context Chat Engine.json index 90f64eb48..e87cd650f 100644 --- a/packages/server/marketplaces/chatflows/Context Chat Engine.json +++ b/packages/server/marketplaces/chatflows/Context Chat Engine.json @@ -1,7 +1,6 @@ { "description": "Answer question based on retrieved documents (context) while remembering previous conversations", "framework": ["LlamaIndex"], - "badge": "POPULAR", "usecases": ["Documents QnA"], "nodes": [ { diff --git a/packages/server/marketplaces/chatflows/Conversation Chain.json b/packages/server/marketplaces/chatflows/Conversation Chain.json index 0e57174b8..c28d615d5 
100644 --- a/packages/server/marketplaces/chatflows/Conversation Chain.json +++ b/packages/server/marketplaces/chatflows/Conversation Chain.json @@ -1,8 +1,7 @@ { "description": "Basic example of Conversation Chain with built-in memory - works exactly like ChatGPT", - "usecases": ["Chatbot"], + "usecases": ["Basic"], "framework": ["Langchain"], - "badge": "POPULAR", "nodes": [ { "width": 300, diff --git a/packages/server/marketplaces/chatflows/Conversational Retrieval QA Chain.json b/packages/server/marketplaces/chatflows/Conversational Retrieval QA Chain.json index dbdb5cb65..6e6a3ebb8 100644 --- a/packages/server/marketplaces/chatflows/Conversational Retrieval QA Chain.json +++ b/packages/server/marketplaces/chatflows/Conversational Retrieval QA Chain.json @@ -1,6 +1,5 @@ { "description": "Documents QnA using Retrieval Augmented Generation (RAG) with Mistral and FAISS for similarity search", - "badge": "POPULAR", "usecases": ["Documents QnA"], "framework": ["Langchain"], "nodes": [ diff --git a/packages/server/marketplaces/chatflows/Flowise Docs QnA.json b/packages/server/marketplaces/chatflows/Github Docs QnA.json similarity index 99% rename from packages/server/marketplaces/chatflows/Flowise Docs QnA.json rename to packages/server/marketplaces/chatflows/Github Docs QnA.json index 0d0c87dd2..21e781208 100644 --- a/packages/server/marketplaces/chatflows/Flowise Docs QnA.json +++ b/packages/server/marketplaces/chatflows/Github Docs QnA.json @@ -1,6 +1,5 @@ { - "description": "Flowise Docs Github QnA using Retrieval Augmented Generation (RAG)", - "badge": "POPULAR", + "description": "Github Docs QnA using Retrieval Augmented Generation (RAG)", "usecases": ["Documents QnA"], "framework": ["Langchain"], "nodes": [ diff --git a/packages/server/marketplaces/chatflows/IfElse.json b/packages/server/marketplaces/chatflows/IfElse.json deleted file mode 100644 index ff163da26..000000000 --- a/packages/server/marketplaces/chatflows/IfElse.json +++ /dev/null @@ -1,1246 +0,0 
@@ -{ - "description": "Split flows based on if else condition", - "framework": ["Langchain"], - "usecases": ["Basic"], - "nodes": [ - { - "width": 300, - "height": 513, - "id": "promptTemplate_0", - "position": { - "x": 792.9464838535649, - "y": 527.1718536712464 - }, - "type": "customNode", - "data": { - "id": "promptTemplate_0", - "label": "Prompt Template", - "version": 1, - "name": "promptTemplate", - "type": "PromptTemplate", - "baseClasses": ["PromptTemplate", "BaseStringPromptTemplate", "BasePromptTemplate"], - "category": "Prompts", - "description": "Schema to represent a basic prompt for an LLM", - "inputParams": [ - { - "label": "Template", - "name": "template", - "type": "string", - "rows": 4, - "placeholder": "What is a good name for a company that makes {product}?", - "id": "promptTemplate_0-input-template-string" - }, - { - "label": "Format Prompt Values", - "name": "promptValues", - "type": "json", - "optional": true, - "acceptVariable": true, - "list": true, - "id": "promptTemplate_0-input-promptValues-json" - } - ], - "inputAnchors": [], - "inputs": { - "template": "You are an AI who performs one task based on the following objective: {objective}.\nRespond with how you would complete this task:", - "promptValues": "{\"objective\":\"{{question}}\"}" - }, - "outputAnchors": [ - { - "id": "promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate", - "name": "promptTemplate", - "label": "PromptTemplate", - "type": "PromptTemplate | BaseStringPromptTemplate | BasePromptTemplate" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 792.9464838535649, - "y": 527.1718536712464 - }, - "dragging": false - }, - { - "width": 300, - "height": 513, - "id": "promptTemplate_1", - "position": { - "x": 1995.1328578238122, - "y": -14.648035759690174 - }, - "type": "customNode", - "data": { - "id": "promptTemplate_1", - "label": "Prompt Template", - "version": 1, - "name": 
"promptTemplate", - "type": "PromptTemplate", - "baseClasses": ["PromptTemplate", "BaseStringPromptTemplate", "BasePromptTemplate"], - "category": "Prompts", - "description": "Schema to represent a basic prompt for an LLM", - "inputParams": [ - { - "label": "Template", - "name": "template", - "type": "string", - "rows": 4, - "placeholder": "What is a good name for a company that makes {product}?", - "id": "promptTemplate_1-input-template-string" - }, - { - "label": "Format Prompt Values", - "name": "promptValues", - "type": "json", - "optional": true, - "acceptVariable": true, - "list": true, - "id": "promptTemplate_1-input-promptValues-json" - } - ], - "inputAnchors": [], - "inputs": { - "template": "You are a task creation AI that uses the result of an execution agent to create new tasks with the following objective: {objective}.\nThe last completed task has the result: {result}.\nBased on the result, create new tasks to be completed by the AI system that do not overlap with result.\nReturn the tasks as an array.", - "promptValues": "{\"objective\":\"{{question}}\",\"result\":\"{{ifElseFunction_0.data.instance}}\"}" - }, - "outputAnchors": [ - { - "id": "promptTemplate_1-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate", - "name": "promptTemplate", - "label": "PromptTemplate", - "type": "PromptTemplate | BaseStringPromptTemplate | BasePromptTemplate" - } - ], - "outputs": {}, - "selected": false - }, - "positionAbsolute": { - "x": 1995.1328578238122, - "y": -14.648035759690174 - }, - "selected": false, - "dragging": false - }, - { - "width": 300, - "height": 508, - "id": "llmChain_0", - "position": { - "x": 1183.0899727188096, - "y": 385.0159960992951 - }, - "type": "customNode", - "data": { - "id": "llmChain_0", - "label": "LLM Chain", - "version": 3, - "name": "llmChain", - "type": "LLMChain", - "baseClasses": ["LLMChain", "BaseChain", "Runnable"], - "category": "Chains", - "description": "Chain to run queries against LLMs", - 
"inputParams": [ - { - "label": "Chain Name", - "name": "chainName", - "type": "string", - "placeholder": "Name Your Chain", - "optional": true, - "id": "llmChain_0-input-chainName-string" - } - ], - "inputAnchors": [ - { - "label": "Language Model", - "name": "model", - "type": "BaseLanguageModel", - "id": "llmChain_0-input-model-BaseLanguageModel" - }, - { - "label": "Prompt", - "name": "prompt", - "type": "BasePromptTemplate", - "id": "llmChain_0-input-prompt-BasePromptTemplate" - }, - { - "label": "Output Parser", - "name": "outputParser", - "type": "BaseLLMOutputParser", - "optional": true, - "id": "llmChain_0-input-outputParser-BaseLLMOutputParser" - }, - { - "label": "Input Moderation", - "description": "Detect text that could generate harmful output and prevent it from being sent to the language model", - "name": "inputModeration", - "type": "Moderation", - "optional": true, - "list": true, - "id": "llmChain_0-input-inputModeration-Moderation" - } - ], - "inputs": { - "model": "{{chatOpenAI_2.data.instance}}", - "prompt": "{{promptTemplate_0.data.instance}}", - "outputParser": "", - "chainName": "FirstChain", - "inputModeration": "" - }, - "outputAnchors": [ - { - "name": "output", - "label": "Output", - "type": "options", - "options": [ - { - "id": "llmChain_0-output-llmChain-LLMChain|BaseChain|Runnable", - "name": "llmChain", - "label": "LLM Chain", - "type": "LLMChain | BaseChain | Runnable" - }, - { - "id": "llmChain_0-output-outputPrediction-string|json", - "name": "outputPrediction", - "label": "Output Prediction", - "type": "string | json" - } - ], - "default": "llmChain" - } - ], - "outputs": { - "output": "outputPrediction" - }, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 1183.0899727188096, - "y": 385.0159960992951 - }, - "dragging": false - }, - { - "width": 300, - "height": 508, - "id": "llmChain_1", - "position": { - "x": 2773.675809586143, - "y": 114.39482869328754 - }, - "type": "customNode", - "data": { - 
"id": "llmChain_1", - "label": "LLM Chain", - "version": 3, - "name": "llmChain", - "type": "LLMChain", - "baseClasses": ["LLMChain", "BaseChain", "Runnable"], - "category": "Chains", - "description": "Chain to run queries against LLMs", - "inputParams": [ - { - "label": "Chain Name", - "name": "chainName", - "type": "string", - "placeholder": "Name Your Chain", - "optional": true, - "id": "llmChain_1-input-chainName-string" - } - ], - "inputAnchors": [ - { - "label": "Language Model", - "name": "model", - "type": "BaseLanguageModel", - "id": "llmChain_1-input-model-BaseLanguageModel" - }, - { - "label": "Prompt", - "name": "prompt", - "type": "BasePromptTemplate", - "id": "llmChain_1-input-prompt-BasePromptTemplate" - }, - { - "label": "Output Parser", - "name": "outputParser", - "type": "BaseLLMOutputParser", - "optional": true, - "id": "llmChain_1-input-outputParser-BaseLLMOutputParser" - }, - { - "label": "Input Moderation", - "description": "Detect text that could generate harmful output and prevent it from being sent to the language model", - "name": "inputModeration", - "type": "Moderation", - "optional": true, - "list": true, - "id": "llmChain_1-input-inputModeration-Moderation" - } - ], - "inputs": { - "model": "{{chatOpenAI_1.data.instance}}", - "prompt": "{{promptTemplate_1.data.instance}}", - "outputParser": "", - "chainName": "SuccessChain", - "inputModeration": "" - }, - "outputAnchors": [ - { - "name": "output", - "label": "Output", - "type": "options", - "options": [ - { - "id": "llmChain_1-output-llmChain-LLMChain|BaseChain|Runnable", - "name": "llmChain", - "label": "LLM Chain", - "type": "LLMChain | BaseChain | Runnable" - }, - { - "id": "llmChain_1-output-outputPrediction-string|json", - "name": "outputPrediction", - "label": "Output Prediction", - "type": "string | json" - } - ], - "default": "llmChain" - } - ], - "outputs": { - "output": "llmChain" - }, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 
2773.675809586143, - "y": 114.39482869328754 - }, - "dragging": false - }, - { - "width": 300, - "height": 513, - "id": "promptTemplate_2", - "position": { - "x": 1992.5456174373144, - "y": 675.5277193898106 - }, - "type": "customNode", - "data": { - "id": "promptTemplate_2", - "label": "Prompt Template", - "version": 1, - "name": "promptTemplate", - "type": "PromptTemplate", - "baseClasses": ["PromptTemplate", "BaseStringPromptTemplate", "BasePromptTemplate"], - "category": "Prompts", - "description": "Schema to represent a basic prompt for an LLM", - "inputParams": [ - { - "label": "Template", - "name": "template", - "type": "string", - "rows": 4, - "placeholder": "What is a good name for a company that makes {product}?", - "id": "promptTemplate_2-input-template-string" - }, - { - "label": "Format Prompt Values", - "name": "promptValues", - "type": "json", - "optional": true, - "acceptVariable": true, - "list": true, - "id": "promptTemplate_2-input-promptValues-json" - } - ], - "inputAnchors": [], - "inputs": { - "template": "Politely say \"I'm not able to answer the query\"", - "promptValues": "{\"objective\":\"{{question}}\",\"result\":\"\"}" - }, - "outputAnchors": [ - { - "id": "promptTemplate_2-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate", - "name": "promptTemplate", - "label": "PromptTemplate", - "type": "PromptTemplate | BaseStringPromptTemplate | BasePromptTemplate" - } - ], - "outputs": {}, - "selected": false - }, - "positionAbsolute": { - "x": 1992.5456174373144, - "y": 675.5277193898106 - }, - "selected": false, - "dragging": false - }, - { - "width": 300, - "height": 508, - "id": "llmChain_2", - "position": { - "x": 2800.114465373451, - "y": 909.2318348964463 - }, - "type": "customNode", - "data": { - "id": "llmChain_2", - "label": "LLM Chain", - "version": 3, - "name": "llmChain", - "type": "LLMChain", - "baseClasses": ["LLMChain", "BaseChain", "Runnable"], - "category": "Chains", - "description": "Chain to run 
queries against LLMs", - "inputParams": [ - { - "label": "Chain Name", - "name": "chainName", - "type": "string", - "placeholder": "Name Your Chain", - "optional": true, - "id": "llmChain_2-input-chainName-string" - } - ], - "inputAnchors": [ - { - "label": "Language Model", - "name": "model", - "type": "BaseLanguageModel", - "id": "llmChain_2-input-model-BaseLanguageModel" - }, - { - "label": "Prompt", - "name": "prompt", - "type": "BasePromptTemplate", - "id": "llmChain_2-input-prompt-BasePromptTemplate" - }, - { - "label": "Output Parser", - "name": "outputParser", - "type": "BaseLLMOutputParser", - "optional": true, - "id": "llmChain_2-input-outputParser-BaseLLMOutputParser" - }, - { - "label": "Input Moderation", - "description": "Detect text that could generate harmful output and prevent it from being sent to the language model", - "name": "inputModeration", - "type": "Moderation", - "optional": true, - "list": true, - "id": "llmChain_2-input-inputModeration-Moderation" - } - ], - "inputs": { - "model": "{{chatAnthropic_0.data.instance}}", - "prompt": "{{promptTemplate_2.data.instance}}", - "outputParser": "", - "chainName": "FallbackChain", - "inputModeration": "" - }, - "outputAnchors": [ - { - "name": "output", - "label": "Output", - "type": "options", - "options": [ - { - "id": "llmChain_2-output-llmChain-LLMChain|BaseChain|Runnable", - "name": "llmChain", - "label": "LLM Chain", - "type": "LLMChain | BaseChain | Runnable" - }, - { - "id": "llmChain_2-output-outputPrediction-string|json", - "name": "outputPrediction", - "label": "Output Prediction", - "type": "string | json" - } - ], - "default": "llmChain" - } - ], - "outputs": { - "output": "llmChain" - }, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 2800.114465373451, - "y": 909.2318348964463 - }, - "dragging": false - }, - { - "width": 300, - "height": 757, - "id": "ifElseFunction_0", - "position": { - "x": 1590.6560099561739, - "y": 265.36655719326177 - }, - "type": 
"customNode", - "data": { - "id": "ifElseFunction_0", - "label": "IfElse Function", - "version": 2, - "name": "ifElseFunction", - "type": "IfElseFunction", - "baseClasses": ["IfElseFunction", "Utilities"], - "tags": ["Utilities"], - "category": "Utilities", - "description": "Split flows based on If Else javascript functions", - "inputParams": [ - { - "label": "Input Variables", - "name": "functionInputVariables", - "description": "Input variables can be used in the function with prefix $. For example: $var", - "type": "json", - "optional": true, - "acceptVariable": true, - "list": true, - "id": "ifElseFunction_0-input-functionInputVariables-json" - }, - { - "label": "IfElse Name", - "name": "functionName", - "type": "string", - "optional": true, - "placeholder": "If Condition Match", - "id": "ifElseFunction_0-input-functionName-string" - }, - { - "label": "If Function", - "name": "ifFunction", - "description": "Function must return a value", - "type": "code", - "rows": 2, - "default": "if (\"hello\" == \"hello\") {\n return true;\n}", - "id": "ifElseFunction_0-input-ifFunction-code" - }, - { - "label": "Else Function", - "name": "elseFunction", - "description": "Function must return a value", - "type": "code", - "rows": 2, - "default": "return false;", - "id": "ifElseFunction_0-input-elseFunction-code" - } - ], - "inputAnchors": [], - "inputs": { - "functionInputVariables": "{\"task\":\"{{llmChain_0.data.instance}}\"}", - "functionName": "If Condition Match", - "ifFunction": "if ($task.includes(\"task\")) {\n // return $task to be used in next prompt as variable\n return $task;\n}", - "elseFunction": "return false;" - }, - "outputAnchors": [ - { - "name": "output", - "label": "Output", - "type": "options", - "description": "", - "options": [ - { - "id": "ifElseFunction_0-output-returnTrue-string|number|boolean|json|array", - "name": "returnTrue", - "label": "True", - "description": "", - "type": "string | number | boolean | json | array" - }, - { - "id": 
"ifElseFunction_0-output-returnFalse-string|number|boolean|json|array", - "name": "returnFalse", - "label": "False", - "description": "", - "type": "string | number | boolean | json | array" - } - ], - "default": "returnTrue" - } - ], - "outputs": { - "output": "returnTrue" - }, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 1590.6560099561739, - "y": 265.36655719326177 - }, - "dragging": false - }, - { - "width": 300, - "height": 670, - "id": "chatOpenAI_1", - "position": { - "x": 2351.7234095119156, - "y": -394.0409300837044 - }, - "type": "customNode", - "data": { - "id": "chatOpenAI_1", - "label": "ChatOpenAI", - "version": 6, - "name": "chatOpenAI", - "type": "ChatOpenAI", - "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel", "Runnable"], - "category": "Chat Models", - "description": "Wrapper around OpenAI large language models that use the Chat endpoint", - "inputParams": [ - { - "label": "Connect Credential", - "name": "credential", - "type": "credential", - "credentialNames": ["openAIApi"], - "id": "chatOpenAI_1-input-credential-credential" - }, - { - "label": "Model Name", - "name": "modelName", - "type": "asyncOptions", - "loadMethod": "listModels", - "default": "gpt-3.5-turbo", - "id": "chatOpenAI_1-input-modelName-options" - }, - { - "label": "Temperature", - "name": "temperature", - "type": "number", - "step": 0.1, - "default": 0.9, - "optional": true, - "id": "chatOpenAI_1-input-temperature-number" - }, - { - "label": "Max Tokens", - "name": "maxTokens", - "type": "number", - "step": 1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_1-input-maxTokens-number" - }, - { - "label": "Top Probability", - "name": "topP", - "type": "number", - "step": 0.1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_1-input-topP-number" - }, - { - "label": "Frequency Penalty", - "name": "frequencyPenalty", - "type": "number", - "step": 0.1, - "optional": true, - "additionalParams": 
true, - "id": "chatOpenAI_1-input-frequencyPenalty-number" - }, - { - "label": "Presence Penalty", - "name": "presencePenalty", - "type": "number", - "step": 0.1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_1-input-presencePenalty-number" - }, - { - "label": "Timeout", - "name": "timeout", - "type": "number", - "step": 1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_1-input-timeout-number" - }, - { - "label": "BasePath", - "name": "basepath", - "type": "string", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_1-input-basepath-string" - }, - { - "label": "BaseOptions", - "name": "baseOptions", - "type": "json", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_1-input-baseOptions-json" - }, - { - "label": "Allow Image Uploads", - "name": "allowImageUploads", - "type": "boolean", - "description": "Automatically uses gpt-4-vision-preview when image is being uploaded from chat. Only works with LLMChain, Conversation Chain, ReAct Agent, and Conversational Agent", - "default": false, - "optional": true, - "id": "chatOpenAI_1-input-allowImageUploads-boolean" - }, - { - "label": "Image Resolution", - "description": "This parameter controls the resolution in which the model views the image.", - "name": "imageResolution", - "type": "options", - "options": [ - { - "label": "Low", - "name": "low" - }, - { - "label": "High", - "name": "high" - }, - { - "label": "Auto", - "name": "auto" - } - ], - "default": "low", - "optional": false, - "additionalParams": true, - "id": "chatOpenAI_1-input-imageResolution-options" - } - ], - "inputAnchors": [ - { - "label": "Cache", - "name": "cache", - "type": "BaseCache", - "optional": true, - "id": "chatOpenAI_1-input-cache-BaseCache" - } - ], - "inputs": { - "cache": "", - "modelName": "gpt-3.5-turbo", - "temperature": 0.9, - "maxTokens": "", - "topP": "", - "frequencyPenalty": "", - "presencePenalty": "", - "timeout": "", - "basepath": "", - "baseOptions": 
"", - "allowImageUploads": true, - "imageResolution": "low" - }, - "outputAnchors": [ - { - "id": "chatOpenAI_1-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", - "name": "chatOpenAI", - "label": "ChatOpenAI", - "type": "ChatOpenAI | BaseChatModel | BaseLanguageModel | Runnable" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 2351.7234095119156, - "y": -394.0409300837044 - }, - "dragging": false - }, - { - "width": 300, - "height": 670, - "id": "chatOpenAI_2", - "position": { - "x": 789.3453885560219, - "y": -179.07897273438854 - }, - "type": "customNode", - "data": { - "id": "chatOpenAI_2", - "label": "ChatOpenAI", - "version": 6, - "name": "chatOpenAI", - "type": "ChatOpenAI", - "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel", "Runnable"], - "category": "Chat Models", - "description": "Wrapper around OpenAI large language models that use the Chat endpoint", - "inputParams": [ - { - "label": "Connect Credential", - "name": "credential", - "type": "credential", - "credentialNames": ["openAIApi"], - "id": "chatOpenAI_2-input-credential-credential" - }, - { - "label": "Model Name", - "name": "modelName", - "type": "asyncOptions", - "loadMethod": "listModels", - "default": "gpt-3.5-turbo", - "id": "chatOpenAI_2-input-modelName-options" - }, - { - "label": "Temperature", - "name": "temperature", - "type": "number", - "step": 0.1, - "default": 0.9, - "optional": true, - "id": "chatOpenAI_2-input-temperature-number" - }, - { - "label": "Max Tokens", - "name": "maxTokens", - "type": "number", - "step": 1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_2-input-maxTokens-number" - }, - { - "label": "Top Probability", - "name": "topP", - "type": "number", - "step": 0.1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_2-input-topP-number" - }, - { - "label": "Frequency Penalty", - "name": "frequencyPenalty", - "type": "number", - "step": 
0.1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_2-input-frequencyPenalty-number" - }, - { - "label": "Presence Penalty", - "name": "presencePenalty", - "type": "number", - "step": 0.1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_2-input-presencePenalty-number" - }, - { - "label": "Timeout", - "name": "timeout", - "type": "number", - "step": 1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_2-input-timeout-number" - }, - { - "label": "BasePath", - "name": "basepath", - "type": "string", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_2-input-basepath-string" - }, - { - "label": "BaseOptions", - "name": "baseOptions", - "type": "json", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_2-input-baseOptions-json" - }, - { - "label": "Allow Image Uploads", - "name": "allowImageUploads", - "type": "boolean", - "description": "Automatically uses gpt-4-vision-preview when image is being uploaded from chat. 
Only works with LLMChain, Conversation Chain, ReAct Agent, and Conversational Agent", - "default": false, - "optional": true, - "id": "chatOpenAI_2-input-allowImageUploads-boolean" - }, - { - "label": "Image Resolution", - "description": "This parameter controls the resolution in which the model views the image.", - "name": "imageResolution", - "type": "options", - "options": [ - { - "label": "Low", - "name": "low" - }, - { - "label": "High", - "name": "high" - }, - { - "label": "Auto", - "name": "auto" - } - ], - "default": "low", - "optional": false, - "additionalParams": true, - "id": "chatOpenAI_2-input-imageResolution-options" - } - ], - "inputAnchors": [ - { - "label": "Cache", - "name": "cache", - "type": "BaseCache", - "optional": true, - "id": "chatOpenAI_2-input-cache-BaseCache" - } - ], - "inputs": { - "cache": "", - "modelName": "gpt-3.5-turbo", - "temperature": 0.9, - "maxTokens": "", - "topP": "", - "frequencyPenalty": "", - "presencePenalty": "", - "timeout": "", - "basepath": "", - "baseOptions": "", - "allowImageUploads": true, - "imageResolution": "low" - }, - "outputAnchors": [ - { - "id": "chatOpenAI_2-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", - "name": "chatOpenAI", - "label": "ChatOpenAI", - "type": "ChatOpenAI | BaseChatModel | BaseLanguageModel | Runnable" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 789.3453885560219, - "y": -179.07897273438854 - }, - "dragging": false - }, - { - "id": "chatAnthropic_0", - "position": { - "x": 2381.220361865136, - "y": 393.26149491753074 - }, - "type": "customNode", - "data": { - "id": "chatAnthropic_0", - "label": "ChatAnthropic", - "version": 6, - "name": "chatAnthropic", - "type": "ChatAnthropic", - "baseClasses": ["ChatAnthropic", "ChatAnthropicMessages", "BaseChatModel", "BaseLanguageModel", "Runnable"], - "category": "Chat Models", - "description": "Wrapper around ChatAnthropic large language models that use the 
Chat endpoint", - "inputParams": [ - { - "label": "Connect Credential", - "name": "credential", - "type": "credential", - "credentialNames": ["anthropicApi"], - "id": "chatAnthropic_0-input-credential-credential" - }, - { - "label": "Model Name", - "name": "modelName", - "type": "asyncOptions", - "loadMethod": "listModels", - "default": "claude-3-haiku", - "id": "chatAnthropic_0-input-modelName-asyncOptions" - }, - { - "label": "Temperature", - "name": "temperature", - "type": "number", - "step": 0.1, - "default": 0.9, - "optional": true, - "id": "chatAnthropic_0-input-temperature-number" - }, - { - "label": "Max Tokens", - "name": "maxTokensToSample", - "type": "number", - "step": 1, - "optional": true, - "additionalParams": true, - "id": "chatAnthropic_0-input-maxTokensToSample-number" - }, - { - "label": "Top P", - "name": "topP", - "type": "number", - "step": 0.1, - "optional": true, - "additionalParams": true, - "id": "chatAnthropic_0-input-topP-number" - }, - { - "label": "Top K", - "name": "topK", - "type": "number", - "step": 0.1, - "optional": true, - "additionalParams": true, - "id": "chatAnthropic_0-input-topK-number" - }, - { - "label": "Allow Image Uploads", - "name": "allowImageUploads", - "type": "boolean", - "description": "Automatically uses claude-3-* models when image is being uploaded from chat. 
Only works with LLMChain, Conversation Chain, ReAct Agent, and Conversational Agent", - "default": false, - "optional": true, - "id": "chatAnthropic_0-input-allowImageUploads-boolean" - } - ], - "inputAnchors": [ - { - "label": "Cache", - "name": "cache", - "type": "BaseCache", - "optional": true, - "id": "chatAnthropic_0-input-cache-BaseCache" - } - ], - "inputs": { - "cache": "", - "modelName": "claude-3-haiku-20240307", - "temperature": 0.9, - "maxTokensToSample": "", - "topP": "", - "topK": "", - "allowImageUploads": "" - }, - "outputAnchors": [ - { - "id": "chatAnthropic_0-output-chatAnthropic-ChatAnthropic|ChatAnthropicMessages|BaseChatModel|BaseLanguageModel|Runnable", - "name": "chatAnthropic", - "label": "ChatAnthropic", - "description": "Wrapper around ChatAnthropic large language models that use the Chat endpoint", - "type": "ChatAnthropic | ChatAnthropicMessages | BaseChatModel | BaseLanguageModel | Runnable" - } - ], - "outputs": {}, - "selected": false - }, - "width": 300, - "height": 670, - "selected": false, - "dragging": false, - "positionAbsolute": { - "x": 2381.220361865136, - "y": 393.26149491753074 - } - }, - { - "id": "stickyNote_0", - "position": { - "x": 1585.520839473698, - "y": 51.83677692300674 - }, - "type": "stickyNote", - "data": { - "id": "stickyNote_0", - "label": "Sticky Note", - "version": 2, - "name": "stickyNote", - "type": "StickyNote", - "baseClasses": ["StickyNote"], - "tags": ["Utilities"], - "category": "Utilities", - "description": "Add a sticky note", - "inputParams": [ - { - "label": "", - "name": "note", - "type": "string", - "rows": 1, - "placeholder": "Type something here", - "optional": true, - "id": "stickyNote_0-input-note-string" - } - ], - "inputAnchors": [], - "inputs": { - "note": "Split the path into 2 ways according to condition\n\n1. If response from first LLM Chain contains the word \"task\", carry on with the next prompt\n\n2. 
Otherwise, politely reject user's request" - }, - "outputAnchors": [ - { - "id": "stickyNote_0-output-stickyNote-StickyNote", - "name": "stickyNote", - "label": "StickyNote", - "description": "Add a sticky note", - "type": "StickyNote" - } - ], - "outputs": {}, - "selected": false - }, - "width": 300, - "height": 183, - "selected": false, - "positionAbsolute": { - "x": 1585.520839473698, - "y": 51.83677692300674 - }, - "dragging": false - }, - { - "id": "stickyNote_1", - "position": { - "x": 2791.378655166414, - "y": 699.1817665106969 - }, - "type": "stickyNote", - "data": { - "id": "stickyNote_1", - "label": "Sticky Note", - "version": 2, - "name": "stickyNote", - "type": "StickyNote", - "baseClasses": ["StickyNote"], - "tags": ["Utilities"], - "category": "Utilities", - "description": "Add a sticky note", - "inputParams": [ - { - "label": "", - "name": "note", - "type": "string", - "rows": 1, - "placeholder": "Type something here", - "optional": true, - "id": "stickyNote_1-input-note-string" - } - ], - "inputAnchors": [], - "inputs": { - "note": "Example question:\n\n- Solve world hunger" - }, - "outputAnchors": [ - { - "id": "stickyNote_1-output-stickyNote-StickyNote", - "name": "stickyNote", - "label": "StickyNote", - "description": "Add a sticky note", - "type": "StickyNote" - } - ], - "outputs": {}, - "selected": false - }, - "width": 300, - "height": 82, - "selected": false, - "positionAbsolute": { - "x": 2791.378655166414, - "y": 699.1817665106969 - }, - "dragging": false - } - ], - "edges": [ - { - "source": "promptTemplate_0", - "sourceHandle": "promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate", - "target": "llmChain_0", - "targetHandle": "llmChain_0-input-prompt-BasePromptTemplate", - "type": "buttonedge", - "id": "promptTemplate_0-promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate-llmChain_0-llmChain_0-input-prompt-BasePromptTemplate", - "data": { - "label": 
"" - } - }, - { - "source": "promptTemplate_1", - "sourceHandle": "promptTemplate_1-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate", - "target": "llmChain_1", - "targetHandle": "llmChain_1-input-prompt-BasePromptTemplate", - "type": "buttonedge", - "id": "promptTemplate_1-promptTemplate_1-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate-llmChain_1-llmChain_1-input-prompt-BasePromptTemplate", - "data": { - "label": "" - } - }, - { - "source": "promptTemplate_2", - "sourceHandle": "promptTemplate_2-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate", - "target": "llmChain_2", - "targetHandle": "llmChain_2-input-prompt-BasePromptTemplate", - "type": "buttonedge", - "id": "promptTemplate_2-promptTemplate_2-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate-llmChain_2-llmChain_2-input-prompt-BasePromptTemplate" - }, - { - "source": "llmChain_0", - "sourceHandle": "llmChain_0-output-outputPrediction-string|json", - "target": "ifElseFunction_0", - "targetHandle": "ifElseFunction_0-input-functionInputVariables-json", - "type": "buttonedge", - "id": "llmChain_0-llmChain_0-output-outputPrediction-string|json-ifElseFunction_0-ifElseFunction_0-input-functionInputVariables-json" - }, - { - "source": "ifElseFunction_0", - "sourceHandle": "ifElseFunction_0-output-returnFalse-string|number|boolean|json|array", - "target": "promptTemplate_2", - "targetHandle": "promptTemplate_2-input-promptValues-json", - "type": "buttonedge", - "id": "ifElseFunction_0-ifElseFunction_0-output-returnFalse-string|number|boolean|json|array-promptTemplate_2-promptTemplate_2-input-promptValues-json" - }, - { - "source": "ifElseFunction_0", - "sourceHandle": "ifElseFunction_0-output-returnTrue-string|number|boolean|json|array", - "target": "promptTemplate_1", - "targetHandle": "promptTemplate_1-input-promptValues-json", - "type": "buttonedge", - "id": 
"ifElseFunction_0-ifElseFunction_0-output-returnTrue-string|number|boolean|json|array-promptTemplate_1-promptTemplate_1-input-promptValues-json" - }, - { - "source": "chatOpenAI_1", - "sourceHandle": "chatOpenAI_1-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", - "target": "llmChain_1", - "targetHandle": "llmChain_1-input-model-BaseLanguageModel", - "type": "buttonedge", - "id": "chatOpenAI_1-chatOpenAI_1-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable-llmChain_1-llmChain_1-input-model-BaseLanguageModel" - }, - { - "source": "chatOpenAI_2", - "sourceHandle": "chatOpenAI_2-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", - "target": "llmChain_0", - "targetHandle": "llmChain_0-input-model-BaseLanguageModel", - "type": "buttonedge", - "id": "chatOpenAI_2-chatOpenAI_2-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable-llmChain_0-llmChain_0-input-model-BaseLanguageModel" - }, - { - "source": "chatAnthropic_0", - "sourceHandle": "chatAnthropic_0-output-chatAnthropic-ChatAnthropic|ChatAnthropicMessages|BaseChatModel|BaseLanguageModel|Runnable", - "target": "llmChain_2", - "targetHandle": "llmChain_2-input-model-BaseLanguageModel", - "type": "buttonedge", - "id": "chatAnthropic_0-chatAnthropic_0-output-chatAnthropic-ChatAnthropic|ChatAnthropicMessages|BaseChatModel|BaseLanguageModel|Runnable-llmChain_2-llmChain_2-input-model-BaseLanguageModel" - } - ] -} diff --git a/packages/server/marketplaces/chatflows/Local QnA.json b/packages/server/marketplaces/chatflows/Local QnA.json index 063a0eb0f..86f2e488a 100644 --- a/packages/server/marketplaces/chatflows/Local QnA.json +++ b/packages/server/marketplaces/chatflows/Local QnA.json @@ -1,6 +1,5 @@ { "description": "QnA chain using Ollama local LLM, LocalAI embedding model, and Faiss local vector store", - "badge": "POPULAR", "usecases": ["Documents QnA"], "framework": ["Langchain"], "nodes": [ diff --git 
a/packages/server/marketplaces/chatflows/Multi Prompt Chain.json b/packages/server/marketplaces/chatflows/Multi Prompt Chain.json deleted file mode 100644 index 4cdbda16b..000000000 --- a/packages/server/marketplaces/chatflows/Multi Prompt Chain.json +++ /dev/null @@ -1,499 +0,0 @@ -{ - "description": "A chain that automatically picks an appropriate prompt from multiple prompts", - "usecases": ["Basic"], - "framework": ["Langchain"], - "nodes": [ - { - "width": 300, - "height": 632, - "id": "promptRetriever_0", - "position": { - "x": 197.46642699727397, - "y": 25.945621297410923 - }, - "type": "customNode", - "data": { - "id": "promptRetriever_0", - "label": "Prompt Retriever", - "name": "promptRetriever", - "version": 1, - "type": "PromptRetriever", - "baseClasses": ["PromptRetriever"], - "category": "Retrievers", - "description": "Store prompt template with name & description to be later queried by MultiPromptChain", - "inputParams": [ - { - "label": "Prompt Name", - "name": "name", - "type": "string", - "placeholder": "physics-qa", - "id": "promptRetriever_0-input-name-string" - }, - { - "label": "Prompt Description", - "name": "description", - "type": "string", - "rows": 3, - "description": "Description of what the prompt does and when it should be used", - "placeholder": "Good for answering questions about physics", - "id": "promptRetriever_0-input-description-string" - }, - { - "label": "Prompt System Message", - "name": "systemMessage", - "type": "string", - "rows": 4, - "placeholder": "You are a very smart physics professor. You are great at answering questions about physics in a concise and easy to understand manner. When you don't know the answer to a question you admit that you don't know.", - "id": "promptRetriever_0-input-systemMessage-string" - } - ], - "inputAnchors": [], - "inputs": { - "name": "physics", - "description": "Good for answering questions about physics", - "systemMessage": "You are a very smart physics professor. 
You are great at answering questions about physics in a concise and easy to understand manner. When you don't know the answer to a question you admit that you don't know." - }, - "outputAnchors": [ - { - "id": "promptRetriever_0-output-promptRetriever-PromptRetriever", - "name": "promptRetriever", - "label": "PromptRetriever", - "type": "PromptRetriever" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 197.46642699727397, - "y": 25.945621297410923 - }, - "dragging": false - }, - { - "width": 300, - "height": 280, - "id": "multiPromptChain_0", - "position": { - "x": 1619.1305522575494, - "y": 210.28103293821243 - }, - "type": "customNode", - "data": { - "id": "multiPromptChain_0", - "label": "Multi Prompt Chain", - "name": "multiPromptChain", - "version": 2, - "type": "MultiPromptChain", - "baseClasses": ["MultiPromptChain", "MultiRouteChain", "BaseChain", "BaseLangChain"], - "category": "Chains", - "description": "Chain automatically picks an appropriate prompt from multiple prompt templates", - "inputParams": [], - "inputAnchors": [ - { - "label": "Language Model", - "name": "model", - "type": "BaseLanguageModel", - "id": "multiPromptChain_0-input-model-BaseLanguageModel" - }, - { - "label": "Prompt Retriever", - "name": "promptRetriever", - "type": "PromptRetriever", - "list": true, - "id": "multiPromptChain_0-input-promptRetriever-PromptRetriever" - }, - { - "label": "Input Moderation", - "description": "Detect text that could generate harmful output and prevent it from being sent to the language model", - "name": "inputModeration", - "type": "Moderation", - "optional": true, - "list": true, - "id": "multiPromptChain_0-input-inputModeration-Moderation" - } - ], - "inputs": { - "inputModeration": "", - "model": "{{chatOpenAI_0.data.instance}}", - "promptRetriever": [ - "{{promptRetriever_0.data.instance}}", - "{{promptRetriever_2.data.instance}}", - "{{promptRetriever_1.data.instance}}" - ] - }, - 
"outputAnchors": [ - { - "id": "multiPromptChain_0-output-multiPromptChain-MultiPromptChain|MultiRouteChain|BaseChain|BaseLangChain", - "name": "multiPromptChain", - "label": "MultiPromptChain", - "type": "MultiPromptChain | MultiRouteChain | BaseChain | BaseLangChain" - } - ], - "outputs": {}, - "selected": false - }, - "positionAbsolute": { - "x": 1619.1305522575494, - "y": 210.28103293821243 - }, - "selected": false, - "dragging": false - }, - { - "width": 300, - "height": 632, - "id": "promptRetriever_1", - "position": { - "x": 539.1322780233141, - "y": -250.72967142925938 - }, - "type": "customNode", - "data": { - "id": "promptRetriever_1", - "label": "Prompt Retriever", - "name": "promptRetriever", - "version": 1, - "type": "PromptRetriever", - "baseClasses": ["PromptRetriever"], - "category": "Retrievers", - "description": "Store prompt template with name & description to be later queried by MultiPromptChain", - "inputParams": [ - { - "label": "Prompt Name", - "name": "name", - "type": "string", - "placeholder": "physics-qa", - "id": "promptRetriever_1-input-name-string" - }, - { - "label": "Prompt Description", - "name": "description", - "type": "string", - "rows": 3, - "description": "Description of what the prompt does and when it should be used", - "placeholder": "Good for answering questions about physics", - "id": "promptRetriever_1-input-description-string" - }, - { - "label": "Prompt System Message", - "name": "systemMessage", - "type": "string", - "rows": 4, - "placeholder": "You are a very smart physics professor. You are great at answering questions about physics in a concise and easy to understand manner. When you don't know the answer to a question you admit that you don't know.", - "id": "promptRetriever_1-input-systemMessage-string" - } - ], - "inputAnchors": [], - "inputs": { - "name": "math", - "description": "Good for answering math questions", - "systemMessage": "You are a very good mathematician. You are great at answering math questions. 
You are so good because you are able to break down hard problems into their component parts, answer the component parts, and then put them together to answer the broader question." - }, - "outputAnchors": [ - { - "id": "promptRetriever_1-output-promptRetriever-PromptRetriever", - "name": "promptRetriever", - "label": "PromptRetriever", - "type": "PromptRetriever" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 539.1322780233141, - "y": -250.72967142925938 - }, - "dragging": false - }, - { - "width": 300, - "height": 632, - "id": "promptRetriever_2", - "position": { - "x": 872.6184534864304, - "y": -366.9443140594265 - }, - "type": "customNode", - "data": { - "id": "promptRetriever_2", - "label": "Prompt Retriever", - "name": "promptRetriever", - "version": 1, - "type": "PromptRetriever", - "baseClasses": ["PromptRetriever"], - "category": "Retrievers", - "description": "Store prompt template with name & description to be later queried by MultiPromptChain", - "inputParams": [ - { - "label": "Prompt Name", - "name": "name", - "type": "string", - "placeholder": "physics-qa", - "id": "promptRetriever_2-input-name-string" - }, - { - "label": "Prompt Description", - "name": "description", - "type": "string", - "rows": 3, - "description": "Description of what the prompt does and when it should be used", - "placeholder": "Good for answering questions about physics", - "id": "promptRetriever_2-input-description-string" - }, - { - "label": "Prompt System Message", - "name": "systemMessage", - "type": "string", - "rows": 4, - "placeholder": "You are a very smart physics professor. You are great at answering questions about physics in a concise and easy to understand manner. 
When you don't know the answer to a question you admit that you don't know.", - "id": "promptRetriever_2-input-systemMessage-string" - } - ], - "inputAnchors": [], - "inputs": { - "name": "history", - "description": "Good for answering questions about history", - "systemMessage": "You are a very smart history professor. You are great at answering questions about history in a concise and easy to understand manner. When you don't know the answer to a question you admit that you don't know." - }, - "outputAnchors": [ - { - "id": "promptRetriever_2-output-promptRetriever-PromptRetriever", - "name": "promptRetriever", - "label": "PromptRetriever", - "type": "PromptRetriever" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 872.6184534864304, - "y": -366.9443140594265 - }, - "dragging": false - }, - { - "width": 300, - "height": 523, - "id": "chatOpenAI_0", - "position": { - "x": 1228.4059611466973, - "y": -326.46419383157513 - }, - "type": "customNode", - "data": { - "id": "chatOpenAI_0", - "label": "ChatOpenAI", - "name": "chatOpenAI", - "version": 6.0, - "type": "ChatOpenAI", - "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel"], - "category": "Chat Models", - "description": "Wrapper around OpenAI large language models that use the Chat endpoint", - "inputParams": [ - { - "label": "Connect Credential", - "name": "credential", - "type": "credential", - "credentialNames": ["openAIApi"], - "id": "chatOpenAI_0-input-credential-credential" - }, - { - "label": "Model Name", - "name": "modelName", - "type": "asyncOptions", - "loadMethod": "listModels", - "default": "gpt-3.5-turbo", - "id": "chatOpenAI_0-input-modelName-options" - }, - { - "label": "Temperature", - "name": "temperature", - "type": "number", - "default": 0.9, - "optional": true, - "id": "chatOpenAI_0-input-temperature-number" - }, - { - "label": "Max Tokens", - "name": "maxTokens", - "type": "number", - "optional": true, - 
"additionalParams": true, - "id": "chatOpenAI_0-input-maxTokens-number" - }, - { - "label": "Top Probability", - "name": "topP", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-topP-number" - }, - { - "label": "Frequency Penalty", - "name": "frequencyPenalty", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-frequencyPenalty-number" - }, - { - "label": "Presence Penalty", - "name": "presencePenalty", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-presencePenalty-number" - }, - { - "label": "Timeout", - "name": "timeout", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-timeout-number" - }, - { - "label": "BasePath", - "name": "basepath", - "type": "string", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-basepath-string" - }, - { - "label": "BaseOptions", - "name": "baseOptions", - "type": "json", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-baseOptions-json" - }, - { - "label": "Allow Image Uploads", - "name": "allowImageUploads", - "type": "boolean", - "description": "Automatically uses gpt-4-vision-preview when image is being uploaded from chat. 
Only works with LLMChain, Conversation Chain, ReAct Agent, and Conversational Agent", - "default": false, - "optional": true, - "id": "chatOpenAI_0-input-allowImageUploads-boolean" - }, - { - "label": "Image Resolution", - "description": "This parameter controls the resolution in which the model views the image.", - "name": "imageResolution", - "type": "options", - "options": [ - { - "label": "Low", - "name": "low" - }, - { - "label": "High", - "name": "high" - }, - { - "label": "Auto", - "name": "auto" - } - ], - "default": "low", - "optional": false, - "additionalParams": true, - "id": "chatOpenAI_0-input-imageResolution-options" - } - ], - "inputAnchors": [ - { - "label": "Cache", - "name": "cache", - "type": "BaseCache", - "optional": true, - "id": "chatOpenAI_0-input-cache-BaseCache" - } - ], - "inputs": { - "modelName": "gpt-3.5-turbo", - "temperature": 0.9, - "maxTokens": "", - "topP": "", - "frequencyPenalty": "", - "presencePenalty": "", - "timeout": "", - "basepath": "", - "baseOptions": "", - "allowImageUploads": true, - "imageResolution": "low" - }, - "outputAnchors": [ - { - "id": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel", - "name": "chatOpenAI", - "label": "ChatOpenAI", - "type": "ChatOpenAI | BaseChatModel | BaseLanguageModel" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 1228.4059611466973, - "y": -326.46419383157513 - }, - "dragging": false - } - ], - "edges": [ - { - "source": "promptRetriever_0", - "sourceHandle": "promptRetriever_0-output-promptRetriever-PromptRetriever", - "target": "multiPromptChain_0", - "targetHandle": "multiPromptChain_0-input-promptRetriever-PromptRetriever", - "type": "buttonedge", - "id": "promptRetriever_0-promptRetriever_0-output-promptRetriever-PromptRetriever-multiPromptChain_0-multiPromptChain_0-input-promptRetriever-PromptRetriever", - "data": { - "label": "" - } - }, - { - "source": "promptRetriever_2", - "sourceHandle": 
"promptRetriever_2-output-promptRetriever-PromptRetriever", - "target": "multiPromptChain_0", - "targetHandle": "multiPromptChain_0-input-promptRetriever-PromptRetriever", - "type": "buttonedge", - "id": "promptRetriever_2-promptRetriever_2-output-promptRetriever-PromptRetriever-multiPromptChain_0-multiPromptChain_0-input-promptRetriever-PromptRetriever", - "data": { - "label": "" - } - }, - { - "source": "promptRetriever_1", - "sourceHandle": "promptRetriever_1-output-promptRetriever-PromptRetriever", - "target": "multiPromptChain_0", - "targetHandle": "multiPromptChain_0-input-promptRetriever-PromptRetriever", - "type": "buttonedge", - "id": "promptRetriever_1-promptRetriever_1-output-promptRetriever-PromptRetriever-multiPromptChain_0-multiPromptChain_0-input-promptRetriever-PromptRetriever", - "data": { - "label": "" - } - }, - { - "source": "chatOpenAI_0", - "sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel", - "target": "multiPromptChain_0", - "targetHandle": "multiPromptChain_0-input-model-BaseLanguageModel", - "type": "buttonedge", - "id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel-multiPromptChain_0-multiPromptChain_0-input-model-BaseLanguageModel", - "data": { - "label": "" - } - } - ] -} diff --git a/packages/server/marketplaces/chatflows/Multi Retrieval QA Chain.json b/packages/server/marketplaces/chatflows/Multi Retrieval QA Chain.json deleted file mode 100644 index 298db5abe..000000000 --- a/packages/server/marketplaces/chatflows/Multi Retrieval QA Chain.json +++ /dev/null @@ -1,1222 +0,0 @@ -{ - "description": "A chain that automatically picks an appropriate vector store retriever from multiple different vector databases", - "usecases": ["Documents QnA"], - "framework": ["Langchain"], - "nodes": [ - { - "width": 300, - "height": 506, - "id": "vectorStoreRetriever_0", - "position": { - "x": 712.9322670298264, - "y": 860.5462810572917 - }, - "type": "customNode", - 
"data": { - "id": "vectorStoreRetriever_0", - "label": "Vector Store Retriever", - "version": 1, - "name": "vectorStoreRetriever", - "type": "VectorStoreRetriever", - "baseClasses": ["VectorStoreRetriever"], - "category": "Retrievers", - "description": "Store vector store as retriever. Used with MultiRetrievalQAChain", - "inputParams": [ - { - "label": "Retriever Name", - "name": "name", - "type": "string", - "placeholder": "netflix movies", - "id": "vectorStoreRetriever_0-input-name-string" - }, - { - "label": "Retriever Description", - "name": "description", - "type": "string", - "rows": 3, - "description": "Description of when to use the vector store retriever", - "placeholder": "Good for answering questions about netflix movies", - "id": "vectorStoreRetriever_0-input-description-string" - } - ], - "inputAnchors": [ - { - "label": "Vector Store", - "name": "vectorStore", - "type": "VectorStore", - "id": "vectorStoreRetriever_0-input-vectorStore-VectorStore" - } - ], - "inputs": { - "vectorStore": "{{supabase_0.data.instance}}", - "name": "aqua teen", - "description": "Good for answering questions about Aqua Teen Hunger Force theme song" - }, - "outputAnchors": [ - { - "id": "vectorStoreRetriever_0-output-vectorStoreRetriever-VectorStoreRetriever", - "name": "vectorStoreRetriever", - "label": "VectorStoreRetriever", - "type": "VectorStoreRetriever" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 712.9322670298264, - "y": 860.5462810572917 - }, - "dragging": false - }, - { - "width": 300, - "height": 429, - "id": "multiRetrievalQAChain_0", - "position": { - "x": 1563.0150452201099, - "y": 460.78375893303934 - }, - "type": "customNode", - "data": { - "id": "multiRetrievalQAChain_0", - "label": "Multi Retrieval QA Chain", - "version": 2, - "name": "multiRetrievalQAChain", - "type": "MultiRetrievalQAChain", - "baseClasses": ["MultiRetrievalQAChain", "MultiRouteChain", "BaseChain", "BaseLangChain"], - "category": 
"Chains", - "description": "QA Chain that automatically picks an appropriate vector store from multiple retrievers", - "inputParams": [ - { - "label": "Return Source Documents", - "name": "returnSourceDocuments", - "type": "boolean", - "optional": true - } - ], - "inputAnchors": [ - { - "label": "Language Model", - "name": "model", - "type": "BaseLanguageModel", - "id": "multiRetrievalQAChain_0-input-model-BaseLanguageModel" - }, - { - "label": "Vector Store Retriever", - "name": "vectorStoreRetriever", - "type": "VectorStoreRetriever", - "list": true, - "id": "multiRetrievalQAChain_0-input-vectorStoreRetriever-VectorStoreRetriever" - }, - { - "label": "Input Moderation", - "description": "Detect text that could generate harmful output and prevent it from being sent to the language model", - "name": "inputModeration", - "type": "Moderation", - "optional": true, - "list": true, - "id": "multiRetrievalQAChain_0-input-inputModeration-Moderation" - } - ], - "inputs": { - "inputModeration": "", - "model": "{{chatOpenAI_0.data.instance}}", - "vectorStoreRetriever": [ - "{{vectorStoreRetriever_0.data.instance}}", - "{{vectorStoreRetriever_1.data.instance}}", - "{{vectorStoreRetriever_2.data.instance}}" - ] - }, - "outputAnchors": [ - { - "id": "multiRetrievalQAChain_0-output-multiRetrievalQAChain-MultiRetrievalQAChain|MultiRouteChain|BaseChain|BaseLangChain", - "name": "multiRetrievalQAChain", - "label": "MultiRetrievalQAChain", - "type": "MultiRetrievalQAChain | MultiRouteChain | BaseChain | BaseLangChain" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 1563.0150452201099, - "y": 460.78375893303934 - }, - "dragging": false - }, - { - "width": 300, - "height": 506, - "id": "vectorStoreRetriever_1", - "position": { - "x": 711.4902931206071, - "y": 315.2414600651632 - }, - "type": "customNode", - "data": { - "id": "vectorStoreRetriever_1", - "label": "Vector Store Retriever", - "version": 1, - "name": 
"vectorStoreRetriever", - "type": "VectorStoreRetriever", - "baseClasses": ["VectorStoreRetriever"], - "category": "Retrievers", - "description": "Store vector store as retriever. Used with MultiRetrievalQAChain", - "inputParams": [ - { - "label": "Retriever Name", - "name": "name", - "type": "string", - "placeholder": "netflix movies", - "id": "vectorStoreRetriever_1-input-name-string" - }, - { - "label": "Retriever Description", - "name": "description", - "type": "string", - "rows": 3, - "description": "Description of when to use the vector store retriever", - "placeholder": "Good for answering questions about netflix movies", - "id": "vectorStoreRetriever_1-input-description-string" - } - ], - "inputAnchors": [ - { - "label": "Vector Store", - "name": "vectorStore", - "type": "VectorStore", - "id": "vectorStoreRetriever_1-input-vectorStore-VectorStore" - } - ], - "inputs": { - "vectorStore": "{{chroma_0.data.instance}}", - "name": "mst3k", - "description": "Good for answering questions about Mystery Science Theater 3000 theme song" - }, - "outputAnchors": [ - { - "id": "vectorStoreRetriever_1-output-vectorStoreRetriever-VectorStoreRetriever", - "name": "vectorStoreRetriever", - "label": "VectorStoreRetriever", - "type": "VectorStoreRetriever" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 711.4902931206071, - "y": 315.2414600651632 - }, - "dragging": false - }, - { - "width": 300, - "height": 506, - "id": "vectorStoreRetriever_2", - "position": { - "x": 706.0716220151372, - "y": -217.51566869136752 - }, - "type": "customNode", - "data": { - "id": "vectorStoreRetriever_2", - "label": "Vector Store Retriever", - "version": 1, - "name": "vectorStoreRetriever", - "type": "VectorStoreRetriever", - "baseClasses": ["VectorStoreRetriever"], - "category": "Retrievers", - "description": "Store vector store as retriever. 
Used with MultiRetrievalQAChain", - "inputParams": [ - { - "label": "Retriever Name", - "name": "name", - "type": "string", - "placeholder": "netflix movies", - "id": "vectorStoreRetriever_2-input-name-string" - }, - { - "label": "Retriever Description", - "name": "description", - "type": "string", - "rows": 3, - "description": "Description of when to use the vector store retriever", - "placeholder": "Good for answering questions about netflix movies", - "id": "vectorStoreRetriever_2-input-description-string" - } - ], - "inputAnchors": [ - { - "label": "Vector Store", - "name": "vectorStore", - "type": "VectorStore", - "id": "vectorStoreRetriever_2-input-vectorStore-VectorStore" - } - ], - "inputs": { - "vectorStore": "{{pinecone_0.data.instance}}", - "name": "animaniacs", - "description": "Good for answering questions about Animaniacs theme song" - }, - "outputAnchors": [ - { - "id": "vectorStoreRetriever_2-output-vectorStoreRetriever-VectorStoreRetriever", - "name": "vectorStoreRetriever", - "label": "VectorStoreRetriever", - "type": "VectorStoreRetriever" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 706.0716220151372, - "y": -217.51566869136752 - }, - "dragging": false - }, - { - "width": 300, - "height": 424, - "id": "openAIEmbeddings_0", - "position": { - "x": -212.46977797044045, - "y": 252.45726960585722 - }, - "type": "customNode", - "data": { - "id": "openAIEmbeddings_0", - "label": "OpenAI Embeddings", - "version": 4, - "name": "openAIEmbeddings", - "type": "OpenAIEmbeddings", - "baseClasses": ["OpenAIEmbeddings", "Embeddings"], - "category": "Embeddings", - "description": "OpenAI API to generate embeddings for a given text", - "inputParams": [ - { - "label": "Connect Credential", - "name": "credential", - "type": "credential", - "credentialNames": ["openAIApi"], - "id": "openAIEmbeddings_0-input-credential-credential" - }, - { - "label": "Model Name", - "name": "modelName", - "type": 
"asyncOptions", - "loadMethod": "listModels", - "default": "text-embedding-ada-002", - "id": "openAIEmbeddings_0-input-modelName-asyncOptions" - }, - { - "label": "Strip New Lines", - "name": "stripNewLines", - "type": "boolean", - "optional": true, - "additionalParams": true, - "id": "openAIEmbeddings_0-input-stripNewLines-boolean" - }, - { - "label": "Batch Size", - "name": "batchSize", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "openAIEmbeddings_0-input-batchSize-number" - }, - { - "label": "Timeout", - "name": "timeout", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "openAIEmbeddings_0-input-timeout-number" - }, - { - "label": "BasePath", - "name": "basepath", - "type": "string", - "optional": true, - "additionalParams": true, - "id": "openAIEmbeddings_0-input-basepath-string" - }, - { - "label": "Dimensions", - "name": "dimensions", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "openAIEmbeddings_0-input-dimensions-number" - } - ], - "inputAnchors": [], - "inputs": { - "modelName": "text-embedding-ada-002", - "stripNewLines": "", - "batchSize": "", - "timeout": "", - "basepath": "", - "dimensions": "" - }, - "outputAnchors": [ - { - "id": "openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", - "name": "openAIEmbeddings", - "label": "OpenAIEmbeddings", - "description": "OpenAI API to generate embeddings for a given text", - "type": "OpenAIEmbeddings | Embeddings" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": -212.46977797044045, - "y": 252.45726960585722 - }, - "dragging": false - }, - { - "width": 300, - "height": 670, - "id": "chatOpenAI_0", - "position": { - "x": 1166.929741805626, - "y": -297.9691758089252 - }, - "type": "customNode", - "data": { - "id": "chatOpenAI_0", - "label": "ChatOpenAI", - "version": 6, - "name": "chatOpenAI", - "type": "ChatOpenAI", - "baseClasses": ["ChatOpenAI", 
"BaseChatModel", "BaseLanguageModel", "Runnable"], - "category": "Chat Models", - "description": "Wrapper around OpenAI large language models that use the Chat endpoint", - "inputParams": [ - { - "label": "Connect Credential", - "name": "credential", - "type": "credential", - "credentialNames": ["openAIApi"], - "id": "chatOpenAI_0-input-credential-credential" - }, - { - "label": "Model Name", - "name": "modelName", - "type": "asyncOptions", - "loadMethod": "listModels", - "default": "gpt-3.5-turbo", - "id": "chatOpenAI_0-input-modelName-options" - }, - { - "label": "Temperature", - "name": "temperature", - "type": "number", - "step": 0.1, - "default": 0.9, - "optional": true, - "id": "chatOpenAI_0-input-temperature-number" - }, - { - "label": "Max Tokens", - "name": "maxTokens", - "type": "number", - "step": 1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-maxTokens-number" - }, - { - "label": "Top Probability", - "name": "topP", - "type": "number", - "step": 0.1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-topP-number" - }, - { - "label": "Frequency Penalty", - "name": "frequencyPenalty", - "type": "number", - "step": 0.1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-frequencyPenalty-number" - }, - { - "label": "Presence Penalty", - "name": "presencePenalty", - "type": "number", - "step": 0.1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-presencePenalty-number" - }, - { - "label": "Timeout", - "name": "timeout", - "type": "number", - "step": 1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-timeout-number" - }, - { - "label": "BasePath", - "name": "basepath", - "type": "string", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-basepath-string" - }, - { - "label": "BaseOptions", - "name": "baseOptions", - "type": "json", - "optional": true, - "additionalParams": true, - "id": 
"chatOpenAI_0-input-baseOptions-json" - }, - { - "label": "Allow Image Uploads", - "name": "allowImageUploads", - "type": "boolean", - "description": "Automatically uses gpt-4-vision-preview when image is being uploaded from chat. Only works with LLMChain, Conversation Chain, ReAct Agent, and Conversational Agent", - "default": false, - "optional": true, - "id": "chatOpenAI_0-input-allowImageUploads-boolean" - }, - { - "label": "Image Resolution", - "description": "This parameter controls the resolution in which the model views the image.", - "name": "imageResolution", - "type": "options", - "options": [ - { - "label": "Low", - "name": "low" - }, - { - "label": "High", - "name": "high" - }, - { - "label": "Auto", - "name": "auto" - } - ], - "default": "low", - "optional": false, - "additionalParams": true, - "id": "chatOpenAI_0-input-imageResolution-options" - } - ], - "inputAnchors": [ - { - "label": "Cache", - "name": "cache", - "type": "BaseCache", - "optional": true, - "id": "chatOpenAI_0-input-cache-BaseCache" - } - ], - "inputs": { - "cache": "", - "modelName": "gpt-3.5-turbo", - "temperature": 0.9, - "maxTokens": "", - "topP": "", - "frequencyPenalty": "", - "presencePenalty": "", - "timeout": "", - "basepath": "", - "baseOptions": "", - "allowImageUploads": true, - "imageResolution": "low" - }, - "outputAnchors": [ - { - "id": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", - "name": "chatOpenAI", - "label": "ChatOpenAI", - "type": "ChatOpenAI | BaseChatModel | BaseLanguageModel | Runnable" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 1166.929741805626, - "y": -297.9691758089252 - }, - "dragging": false - }, - { - "width": 300, - "height": 606, - "id": "pinecone_0", - "position": { - "x": 268.04147939086755, - "y": -407.5681206851249 - }, - "type": "customNode", - "data": { - "id": "pinecone_0", - "label": "Pinecone", - "version": 3, - "name": "pinecone", - 
"type": "Pinecone", - "baseClasses": ["Pinecone", "VectorStoreRetriever", "BaseRetriever"], - "category": "Vector Stores", - "description": "Upsert embedded data and perform similarity or mmr search using Pinecone, a leading fully managed hosted vector database", - "inputParams": [ - { - "label": "Connect Credential", - "name": "credential", - "type": "credential", - "credentialNames": ["pineconeApi"], - "id": "pinecone_0-input-credential-credential" - }, - { - "label": "Pinecone Index", - "name": "pineconeIndex", - "type": "string", - "id": "pinecone_0-input-pineconeIndex-string" - }, - { - "label": "Pinecone Namespace", - "name": "pineconeNamespace", - "type": "string", - "placeholder": "my-first-namespace", - "additionalParams": true, - "optional": true, - "id": "pinecone_0-input-pineconeNamespace-string" - }, - { - "label": "Pinecone Metadata Filter", - "name": "pineconeMetadataFilter", - "type": "json", - "optional": true, - "additionalParams": true, - "id": "pinecone_0-input-pineconeMetadataFilter-json" - }, - { - "label": "Top K", - "name": "topK", - "description": "Number of top results to fetch. Default to 4", - "placeholder": "4", - "type": "number", - "additionalParams": true, - "optional": true, - "id": "pinecone_0-input-topK-number" - }, - { - "label": "Search Type", - "name": "searchType", - "type": "options", - "default": "similarity", - "options": [ - { - "label": "Similarity", - "name": "similarity" - }, - { - "label": "Max Marginal Relevance", - "name": "mmr" - } - ], - "additionalParams": true, - "optional": true, - "id": "pinecone_0-input-searchType-options" - }, - { - "label": "Fetch K (for MMR Search)", - "name": "fetchK", - "description": "Number of initial documents to fetch for MMR reranking. Default to 20. 
Used only when the search type is MMR", - "placeholder": "20", - "type": "number", - "additionalParams": true, - "optional": true, - "id": "pinecone_0-input-fetchK-number" - }, - { - "label": "Lambda (for MMR Search)", - "name": "lambda", - "description": "Number between 0 and 1 that determines the degree of diversity among the results, where 0 corresponds to maximum diversity and 1 to minimum diversity. Used only when the search type is MMR", - "placeholder": "0.5", - "type": "number", - "additionalParams": true, - "optional": true, - "id": "pinecone_0-input-lambda-number" - } - ], - "inputAnchors": [ - { - "label": "Document", - "name": "document", - "type": "Document", - "list": true, - "optional": true, - "id": "pinecone_0-input-document-Document" - }, - { - "label": "Embeddings", - "name": "embeddings", - "type": "Embeddings", - "id": "pinecone_0-input-embeddings-Embeddings" - }, - { - "label": "Record Manager", - "name": "recordManager", - "type": "RecordManager", - "description": "Keep track of the record to prevent duplication", - "optional": true, - "id": "pinecone_0-input-recordManager-RecordManager" - } - ], - "inputs": { - "document": "", - "embeddings": "{{openAIEmbeddings_0.data.instance}}", - "recordManager": "", - "pineconeIndex": "", - "pineconeNamespace": "", - "pineconeMetadataFilter": "", - "topK": "", - "searchType": "similarity", - "fetchK": "", - "lambda": "" - }, - "outputAnchors": [ - { - "name": "output", - "label": "Output", - "type": "options", - "description": "", - "options": [ - { - "id": "pinecone_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever", - "name": "retriever", - "label": "Pinecone Retriever", - "description": "", - "type": "Pinecone | VectorStoreRetriever | BaseRetriever" - }, - { - "id": "pinecone_0-output-vectorStore-Pinecone|VectorStore", - "name": "vectorStore", - "label": "Pinecone Vector Store", - "description": "", - "type": "Pinecone | VectorStore" - } - ], - "default": "retriever" - } - ], - 
"outputs": { - "output": "vectorStore" - }, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 268.04147939086755, - "y": -407.5681206851249 - }, - "dragging": false - }, - { - "width": 300, - "height": 704, - "id": "chroma_0", - "position": { - "x": 271.26687710753146, - "y": 240.7980496352519 - }, - "type": "customNode", - "data": { - "id": "chroma_0", - "label": "Chroma", - "version": 2, - "name": "chroma", - "type": "Chroma", - "baseClasses": ["Chroma", "VectorStoreRetriever", "BaseRetriever"], - "category": "Vector Stores", - "description": "Upsert embedded data and perform similarity search upon query using Chroma, an open-source embedding database", - "inputParams": [ - { - "label": "Connect Credential", - "name": "credential", - "type": "credential", - "description": "Only needed if you have chroma on cloud services with X-Api-key", - "optional": true, - "credentialNames": ["chromaApi"], - "id": "chroma_0-input-credential-credential" - }, - { - "label": "Collection Name", - "name": "collectionName", - "type": "string", - "id": "chroma_0-input-collectionName-string" - }, - { - "label": "Chroma URL", - "name": "chromaURL", - "type": "string", - "optional": true, - "id": "chroma_0-input-chromaURL-string" - }, - { - "label": "Chroma Metadata Filter", - "name": "chromaMetadataFilter", - "type": "json", - "optional": true, - "additionalParams": true, - "id": "chroma_0-input-chromaMetadataFilter-json" - }, - { - "label": "Top K", - "name": "topK", - "description": "Number of top results to fetch. 
Default to 4", - "placeholder": "4", - "type": "number", - "additionalParams": true, - "optional": true, - "id": "chroma_0-input-topK-number" - } - ], - "inputAnchors": [ - { - "label": "Document", - "name": "document", - "type": "Document", - "list": true, - "optional": true, - "id": "chroma_0-input-document-Document" - }, - { - "label": "Embeddings", - "name": "embeddings", - "type": "Embeddings", - "id": "chroma_0-input-embeddings-Embeddings" - }, - { - "label": "Record Manager", - "name": "recordManager", - "type": "RecordManager", - "description": "Keep track of the record to prevent duplication", - "optional": true, - "id": "chroma_0-input-recordManager-RecordManager" - } - ], - "inputs": { - "document": "", - "embeddings": "{{openAIEmbeddings_0.data.instance}}", - "recordManager": "", - "collectionName": "", - "chromaURL": "", - "chromaMetadataFilter": "", - "topK": "" - }, - "outputAnchors": [ - { - "name": "output", - "label": "Output", - "type": "options", - "description": "", - "options": [ - { - "id": "chroma_0-output-retriever-Chroma|VectorStoreRetriever|BaseRetriever", - "name": "retriever", - "label": "Chroma Retriever", - "description": "", - "type": "Chroma | VectorStoreRetriever | BaseRetriever" - }, - { - "id": "chroma_0-output-vectorStore-Chroma|VectorStore", - "name": "vectorStore", - "label": "Chroma Vector Store", - "description": "", - "type": "Chroma | VectorStore" - } - ], - "default": "retriever" - } - ], - "outputs": { - "output": "vectorStore" - }, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 271.26687710753146, - "y": 240.7980496352519 - }, - "dragging": false - }, - { - "width": 300, - "height": 803, - "id": "supabase_0", - "position": { - "x": 274.75982285806055, - "y": 982.5186034037372 - }, - "type": "customNode", - "data": { - "id": "supabase_0", - "label": "Supabase", - "version": 4, - "name": "supabase", - "type": "Supabase", - "baseClasses": ["Supabase", "VectorStoreRetriever", "BaseRetriever"], 
- "category": "Vector Stores", - "description": "Upsert embedded data and perform similarity or mmr search upon query using Supabase via pgvector extension", - "inputParams": [ - { - "label": "Connect Credential", - "name": "credential", - "type": "credential", - "credentialNames": ["supabaseApi"], - "id": "supabase_0-input-credential-credential" - }, - { - "label": "Supabase Project URL", - "name": "supabaseProjUrl", - "type": "string", - "id": "supabase_0-input-supabaseProjUrl-string" - }, - { - "label": "Table Name", - "name": "tableName", - "type": "string", - "id": "supabase_0-input-tableName-string" - }, - { - "label": "Query Name", - "name": "queryName", - "type": "string", - "id": "supabase_0-input-queryName-string" - }, - { - "label": "Supabase Metadata Filter", - "name": "supabaseMetadataFilter", - "type": "json", - "optional": true, - "additionalParams": true, - "id": "supabase_0-input-supabaseMetadataFilter-json" - }, - { - "label": "Supabase RPC Filter", - "name": "supabaseRPCFilter", - "type": "string", - "rows": 4, - "placeholder": "filter(\"metadata->a::int\", \"gt\", 5)\n.filter(\"metadata->c::int\", \"gt\", 7)\n.filter(\"metadata->>stuff\", \"eq\", \"right\");", - "description": "Query builder-style filtering. If this is set, will override the metadata filter. Refer here for more information", - "optional": true, - "additionalParams": true, - "id": "supabase_0-input-supabaseRPCFilter-string" - }, - { - "label": "Top K", - "name": "topK", - "description": "Number of top results to fetch. 
Default to 4", - "placeholder": "4", - "type": "number", - "additionalParams": true, - "optional": true, - "id": "supabase_0-input-topK-number" - }, - { - "label": "Search Type", - "name": "searchType", - "type": "options", - "default": "similarity", - "options": [ - { - "label": "Similarity", - "name": "similarity" - }, - { - "label": "Max Marginal Relevance", - "name": "mmr" - } - ], - "additionalParams": true, - "optional": true, - "id": "supabase_0-input-searchType-options" - }, - { - "label": "Fetch K (for MMR Search)", - "name": "fetchK", - "description": "Number of initial documents to fetch for MMR reranking. Default to 20. Used only when the search type is MMR", - "placeholder": "20", - "type": "number", - "additionalParams": true, - "optional": true, - "id": "supabase_0-input-fetchK-number" - }, - { - "label": "Lambda (for MMR Search)", - "name": "lambda", - "description": "Number between 0 and 1 that determines the degree of diversity among the results, where 0 corresponds to maximum diversity and 1 to minimum diversity. 
Used only when the search type is MMR", - "placeholder": "0.5", - "type": "number", - "additionalParams": true, - "optional": true, - "id": "supabase_0-input-lambda-number" - } - ], - "inputAnchors": [ - { - "label": "Document", - "name": "document", - "type": "Document", - "list": true, - "optional": true, - "id": "supabase_0-input-document-Document" - }, - { - "label": "Embeddings", - "name": "embeddings", - "type": "Embeddings", - "id": "supabase_0-input-embeddings-Embeddings" - }, - { - "label": "Record Manager", - "name": "recordManager", - "type": "RecordManager", - "description": "Keep track of the record to prevent duplication", - "optional": true, - "id": "supabase_0-input-recordManager-RecordManager" - } - ], - "inputs": { - "document": "", - "embeddings": "{{openAIEmbeddings_0.data.instance}}", - "recordManager": "", - "supabaseProjUrl": "", - "tableName": "", - "queryName": "", - "supabaseMetadataFilter": "", - "supabaseRPCFilter": "", - "topK": "", - "searchType": "similarity", - "fetchK": "", - "lambda": "" - }, - "outputAnchors": [ - { - "name": "output", - "label": "Output", - "type": "options", - "description": "", - "options": [ - { - "id": "supabase_0-output-retriever-Supabase|VectorStoreRetriever|BaseRetriever", - "name": "retriever", - "label": "Supabase Retriever", - "description": "", - "type": "Supabase | VectorStoreRetriever | BaseRetriever" - }, - { - "id": "supabase_0-output-vectorStore-Supabase|VectorStore", - "name": "vectorStore", - "label": "Supabase Vector Store", - "description": "", - "type": "Supabase | VectorStore" - } - ], - "default": "retriever" - } - ], - "outputs": { - "output": "vectorStore" - }, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 274.75982285806055, - "y": 982.5186034037372 - }, - "dragging": false - }, - { - "id": "stickyNote_0", - "position": { - "x": 1564.4709721348295, - "y": 121.26040803337389 - }, - "type": "stickyNote", - "data": { - "id": "stickyNote_0", - "label": "Sticky 
Note", - "version": 2, - "name": "stickyNote", - "type": "StickyNote", - "baseClasses": ["StickyNote"], - "tags": ["Utilities"], - "category": "Utilities", - "description": "Add a sticky note", - "inputParams": [ - { - "label": "", - "name": "note", - "type": "string", - "rows": 1, - "placeholder": "Type something here", - "optional": true, - "id": "stickyNote_0-input-note-string" - } - ], - "inputAnchors": [], - "inputs": { - "note": "Multi Retrieval QA Chain is able to pick which Vector Store Retriever to use based on user question.\n\nHowever it comes with the restriction for not being able to resume follow up conversations because there isn't any memory.\n\nIt is suitable for LLM which doesn't have function calling support.\n\nOtherwise, it is recommended to use Multiple Documents QnA template which uses Tool Agent" - }, - "outputAnchors": [ - { - "id": "stickyNote_0-output-stickyNote-StickyNote", - "name": "stickyNote", - "label": "StickyNote", - "description": "Add a sticky note", - "type": "StickyNote" - } - ], - "outputs": {}, - "selected": false - }, - "width": 300, - "height": 324, - "selected": false, - "positionAbsolute": { - "x": 1564.4709721348295, - "y": 121.26040803337389 - }, - "dragging": false - } - ], - "edges": [ - { - "source": "vectorStoreRetriever_0", - "sourceHandle": "vectorStoreRetriever_0-output-vectorStoreRetriever-VectorStoreRetriever", - "target": "multiRetrievalQAChain_0", - "targetHandle": "multiRetrievalQAChain_0-input-vectorStoreRetriever-VectorStoreRetriever", - "type": "buttonedge", - "id": "vectorStoreRetriever_0-vectorStoreRetriever_0-output-vectorStoreRetriever-VectorStoreRetriever-multiRetrievalQAChain_0-multiRetrievalQAChain_0-input-vectorStoreRetriever-VectorStoreRetriever", - "data": { - "label": "" - } - }, - { - "source": "vectorStoreRetriever_1", - "sourceHandle": "vectorStoreRetriever_1-output-vectorStoreRetriever-VectorStoreRetriever", - "target": "multiRetrievalQAChain_0", - "targetHandle": 
"multiRetrievalQAChain_0-input-vectorStoreRetriever-VectorStoreRetriever", - "type": "buttonedge", - "id": "vectorStoreRetriever_1-vectorStoreRetriever_1-output-vectorStoreRetriever-VectorStoreRetriever-multiRetrievalQAChain_0-multiRetrievalQAChain_0-input-vectorStoreRetriever-VectorStoreRetriever", - "data": { - "label": "" - } - }, - { - "source": "vectorStoreRetriever_2", - "sourceHandle": "vectorStoreRetriever_2-output-vectorStoreRetriever-VectorStoreRetriever", - "target": "multiRetrievalQAChain_0", - "targetHandle": "multiRetrievalQAChain_0-input-vectorStoreRetriever-VectorStoreRetriever", - "type": "buttonedge", - "id": "vectorStoreRetriever_2-vectorStoreRetriever_2-output-vectorStoreRetriever-VectorStoreRetriever-multiRetrievalQAChain_0-multiRetrievalQAChain_0-input-vectorStoreRetriever-VectorStoreRetriever", - "data": { - "label": "" - } - }, - { - "source": "chatOpenAI_0", - "sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", - "target": "multiRetrievalQAChain_0", - "targetHandle": "multiRetrievalQAChain_0-input-model-BaseLanguageModel", - "type": "buttonedge", - "id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable-multiRetrievalQAChain_0-multiRetrievalQAChain_0-input-model-BaseLanguageModel", - "data": { - "label": "" - } - }, - { - "source": "pinecone_0", - "sourceHandle": "pinecone_0-output-vectorStore-Pinecone|VectorStore", - "target": "vectorStoreRetriever_2", - "targetHandle": "vectorStoreRetriever_2-input-vectorStore-VectorStore", - "type": "buttonedge", - "id": "pinecone_0-pinecone_0-output-vectorStore-Pinecone|VectorStore-vectorStoreRetriever_2-vectorStoreRetriever_2-input-vectorStore-VectorStore", - "data": { - "label": "" - } - }, - { - "source": "chroma_0", - "sourceHandle": "chroma_0-output-vectorStore-Chroma|VectorStore", - "target": "vectorStoreRetriever_1", - "targetHandle": "vectorStoreRetriever_1-input-vectorStore-VectorStore", - "type": 
"buttonedge", - "id": "chroma_0-chroma_0-output-vectorStore-Chroma|VectorStore-vectorStoreRetriever_1-vectorStoreRetriever_1-input-vectorStore-VectorStore", - "data": { - "label": "" - } - }, - { - "source": "supabase_0", - "sourceHandle": "supabase_0-output-vectorStore-Supabase|VectorStore", - "target": "vectorStoreRetriever_0", - "targetHandle": "vectorStoreRetriever_0-input-vectorStore-VectorStore", - "type": "buttonedge", - "id": "supabase_0-supabase_0-output-vectorStore-Supabase|VectorStore-vectorStoreRetriever_0-vectorStoreRetriever_0-input-vectorStore-VectorStore", - "data": { - "label": "" - } - }, - { - "source": "openAIEmbeddings_0", - "sourceHandle": "openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", - "target": "supabase_0", - "targetHandle": "supabase_0-input-embeddings-Embeddings", - "type": "buttonedge", - "id": "openAIEmbeddings_0-openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings-supabase_0-supabase_0-input-embeddings-Embeddings", - "data": { - "label": "" - } - }, - { - "source": "openAIEmbeddings_0", - "sourceHandle": "openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", - "target": "chroma_0", - "targetHandle": "chroma_0-input-embeddings-Embeddings", - "type": "buttonedge", - "id": "openAIEmbeddings_0-openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings-chroma_0-chroma_0-input-embeddings-Embeddings", - "data": { - "label": "" - } - }, - { - "source": "openAIEmbeddings_0", - "sourceHandle": "openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", - "target": "pinecone_0", - "targetHandle": "pinecone_0-input-embeddings-Embeddings", - "type": "buttonedge", - "id": "openAIEmbeddings_0-openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings-pinecone_0-pinecone_0-input-embeddings-Embeddings", - "data": { - "label": "" - } - } - ] -} diff --git a/packages/server/marketplaces/chatflows/Multiple Documents QnA.json 
b/packages/server/marketplaces/chatflows/Multiple Documents QnA.json index ff4d6b7f0..46fdc99b9 100644 --- a/packages/server/marketplaces/chatflows/Multiple Documents QnA.json +++ b/packages/server/marketplaces/chatflows/Multiple Documents QnA.json @@ -1,6 +1,5 @@ { "description": "Tool agent that can retrieve answers from multiple sources using relevant Retriever Tools", - "badge": "POPULAR", "usecases": ["Documents QnA"], "framework": ["Langchain"], "nodes": [ diff --git a/packages/server/marketplaces/chatflows/Multiple VectorDB.json b/packages/server/marketplaces/chatflows/Multiple VectorDB.json deleted file mode 100644 index d8e1e9e89..000000000 --- a/packages/server/marketplaces/chatflows/Multiple VectorDB.json +++ /dev/null @@ -1,1916 +0,0 @@ -{ - "description": "Conversational agent to choose between multiple Chain Tools, each connected to different vector databases", - "usecases": ["Documents QnA"], - "framework": ["Langchain"], - "nodes": [ - { - "width": 300, - "height": 603, - "id": "chainTool_2", - "position": { - "x": 1274.762717089282, - "y": -955.2604402500798 - }, - "type": "customNode", - "data": { - "id": "chainTool_2", - "label": "Chain Tool", - "version": 1, - "name": "chainTool", - "type": "ChainTool", - "baseClasses": ["ChainTool", "DynamicTool", "Tool", "StructuredTool", "BaseLangChain"], - "category": "Tools", - "description": "Use a chain as allowed tool for agent", - "inputParams": [ - { - "label": "Chain Name", - "name": "name", - "type": "string", - "placeholder": "state-of-union-qa", - "id": "chainTool_2-input-name-string" - }, - { - "label": "Chain Description", - "name": "description", - "type": "string", - "rows": 3, - "placeholder": "State of the Union QA - useful for when you need to ask questions about the most recent state of the union address.", - "id": "chainTool_2-input-description-string" - }, - { - "label": "Return Direct", - "name": "returnDirect", - "type": "boolean", - "optional": true, - "id": 
"chainTool_2-input-returnDirect-boolean" - } - ], - "inputAnchors": [ - { - "label": "Base Chain", - "name": "baseChain", - "type": "BaseChain", - "id": "chainTool_2-input-baseChain-BaseChain" - } - ], - "inputs": { - "name": "ai-paper-qa", - "description": "AI Paper QA - useful for when you need to ask questions about the AI-Generated Content paper.", - "returnDirect": true, - "baseChain": "{{retrievalQAChain_0.data.instance}}" - }, - "outputAnchors": [ - { - "id": "chainTool_2-output-chainTool-ChainTool|DynamicTool|Tool|StructuredTool|BaseLangChain", - "name": "chainTool", - "label": "ChainTool", - "type": "ChainTool | DynamicTool | Tool | StructuredTool | BaseLangChain" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 1274.762717089282, - "y": -955.2604402500798 - }, - "dragging": false - }, - { - "width": 300, - "height": 603, - "id": "chainTool_3", - "position": { - "x": 1278.5582632273515, - "y": -214.68611013834368 - }, - "type": "customNode", - "data": { - "id": "chainTool_3", - "label": "Chain Tool", - "version": 1, - "name": "chainTool", - "type": "ChainTool", - "baseClasses": ["ChainTool", "DynamicTool", "Tool", "StructuredTool", "BaseLangChain"], - "category": "Tools", - "description": "Use a chain as allowed tool for agent", - "inputParams": [ - { - "label": "Chain Name", - "name": "name", - "type": "string", - "placeholder": "state-of-union-qa", - "id": "chainTool_3-input-name-string" - }, - { - "label": "Chain Description", - "name": "description", - "type": "string", - "rows": 3, - "placeholder": "State of the Union QA - useful for when you need to ask questions about the most recent state of the union address.", - "id": "chainTool_3-input-description-string" - }, - { - "label": "Return Direct", - "name": "returnDirect", - "type": "boolean", - "optional": true, - "id": "chainTool_3-input-returnDirect-boolean" - } - ], - "inputAnchors": [ - { - "label": "Base Chain", - "name": "baseChain", - 
"type": "BaseChain", - "id": "chainTool_3-input-baseChain-BaseChain" - } - ], - "inputs": { - "name": "state-of-union-qa", - "description": "State of the Union QA - useful for when you need to ask questions about the president speech and most recent state of the union address.", - "returnDirect": true, - "baseChain": "{{retrievalQAChain_1.data.instance}}" - }, - "outputAnchors": [ - { - "id": "chainTool_3-output-chainTool-ChainTool|DynamicTool|Tool|StructuredTool|BaseLangChain", - "name": "chainTool", - "label": "ChainTool", - "type": "ChainTool | DynamicTool | Tool | StructuredTool | BaseLangChain" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "dragging": false, - "positionAbsolute": { - "x": 1278.5582632273515, - "y": -214.68611013834368 - } - }, - { - "width": 300, - "height": 332, - "id": "retrievalQAChain_0", - "position": { - "x": 898.1253096948574, - "y": -859.1174013418433 - }, - "type": "customNode", - "data": { - "id": "retrievalQAChain_0", - "label": "Retrieval QA Chain", - "version": 2, - "name": "retrievalQAChain", - "type": "RetrievalQAChain", - "baseClasses": ["RetrievalQAChain", "BaseChain", "BaseLangChain"], - "category": "Chains", - "description": "QA chain to answer a question based on the retrieved documents", - "inputParams": [], - "inputAnchors": [ - { - "label": "Language Model", - "name": "model", - "type": "BaseLanguageModel", - "id": "retrievalQAChain_0-input-model-BaseLanguageModel" - }, - { - "label": "Vector Store Retriever", - "name": "vectorStoreRetriever", - "type": "BaseRetriever", - "id": "retrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever" - }, - { - "label": "Input Moderation", - "description": "Detect text that could generate harmful output and prevent it from being sent to the language model", - "name": "inputModeration", - "type": "Moderation", - "optional": true, - "list": true, - "id": "retrievalQAChain_0-input-inputModeration-Moderation" - } - ], - "inputs": { - "inputModeration": "", - 
"model": "{{chatOpenAI_0.data.instance}}", - "vectorStoreRetriever": "{{redis_0.data.instance}}" - }, - "outputAnchors": [ - { - "id": "retrievalQAChain_0-output-retrievalQAChain-RetrievalQAChain|BaseChain|BaseLangChain", - "name": "retrievalQAChain", - "label": "RetrievalQAChain", - "type": "RetrievalQAChain | BaseChain | BaseLangChain" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 898.1253096948574, - "y": -859.1174013418433 - }, - "dragging": false - }, - { - "width": 300, - "height": 332, - "id": "retrievalQAChain_1", - "position": { - "x": 920.057949591115, - "y": 268.2828817441888 - }, - "type": "customNode", - "data": { - "id": "retrievalQAChain_1", - "label": "Retrieval QA Chain", - "version": 2, - "name": "retrievalQAChain", - "type": "RetrievalQAChain", - "baseClasses": ["RetrievalQAChain", "BaseChain", "BaseLangChain"], - "category": "Chains", - "description": "QA chain to answer a question based on the retrieved documents", - "inputParams": [], - "inputAnchors": [ - { - "label": "Language Model", - "name": "model", - "type": "BaseLanguageModel", - "id": "retrievalQAChain_1-input-model-BaseLanguageModel" - }, - { - "label": "Vector Store Retriever", - "name": "vectorStoreRetriever", - "type": "BaseRetriever", - "id": "retrievalQAChain_1-input-vectorStoreRetriever-BaseRetriever" - }, - { - "label": "Input Moderation", - "description": "Detect text that could generate harmful output and prevent it from being sent to the language model", - "name": "inputModeration", - "type": "Moderation", - "optional": true, - "list": true, - "id": "retrievalQAChain_1-input-inputModeration-Moderation" - } - ], - "inputs": { - "inputModeration": "", - "model": "{{chatOpenAI_1.data.instance}}", - "vectorStoreRetriever": "{{faiss_0.data.instance}}" - }, - "outputAnchors": [ - { - "id": "retrievalQAChain_1-output-retrievalQAChain-RetrievalQAChain|BaseChain|BaseLangChain", - "name": "retrievalQAChain", - "label": 
"RetrievalQAChain", - "type": "RetrievalQAChain | BaseChain | BaseLangChain" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 920.057949591115, - "y": 268.2828817441888 - }, - "dragging": false - }, - { - "width": 300, - "height": 424, - "id": "openAIEmbeddings_1", - "position": { - "x": 100.06006551346672, - "y": -686.9997729064416 - }, - "type": "customNode", - "data": { - "id": "openAIEmbeddings_1", - "label": "OpenAI Embeddings", - "version": 4, - "name": "openAIEmbeddings", - "type": "OpenAIEmbeddings", - "baseClasses": ["OpenAIEmbeddings", "Embeddings"], - "category": "Embeddings", - "description": "OpenAI API to generate embeddings for a given text", - "inputParams": [ - { - "label": "Connect Credential", - "name": "credential", - "type": "credential", - "credentialNames": ["openAIApi"], - "id": "openAIEmbeddings_1-input-credential-credential" - }, - { - "label": "Model Name", - "name": "modelName", - "type": "asyncOptions", - "loadMethod": "listModels", - "default": "text-embedding-ada-002", - "id": "openAIEmbeddings_1-input-modelName-asyncOptions" - }, - { - "label": "Strip New Lines", - "name": "stripNewLines", - "type": "boolean", - "optional": true, - "additionalParams": true, - "id": "openAIEmbeddings_1-input-stripNewLines-boolean" - }, - { - "label": "Batch Size", - "name": "batchSize", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "openAIEmbeddings_1-input-batchSize-number" - }, - { - "label": "Timeout", - "name": "timeout", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "openAIEmbeddings_1-input-timeout-number" - }, - { - "label": "BasePath", - "name": "basepath", - "type": "string", - "optional": true, - "additionalParams": true, - "id": "openAIEmbeddings_1-input-basepath-string" - }, - { - "label": "Dimensions", - "name": "dimensions", - "type": "number", - "optional": true, - "additionalParams": true, - "id": 
"openAIEmbeddings_1-input-dimensions-number" - } - ], - "inputAnchors": [], - "inputs": { - "modelName": "text-embedding-ada-002", - "stripNewLines": "", - "batchSize": "", - "timeout": "", - "basepath": "", - "dimensions": "" - }, - "outputAnchors": [ - { - "id": "openAIEmbeddings_1-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", - "name": "openAIEmbeddings", - "label": "OpenAIEmbeddings", - "description": "OpenAI API to generate embeddings for a given text", - "type": "OpenAIEmbeddings | Embeddings" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 100.06006551346672, - "y": -686.9997729064416 - }, - "dragging": false - }, - { - "width": 300, - "height": 424, - "id": "openAIEmbeddings_2", - "position": { - "x": 126.74109446437771, - "y": 542.6301053870723 - }, - "type": "customNode", - "data": { - "id": "openAIEmbeddings_2", - "label": "OpenAI Embeddings", - "version": 4, - "name": "openAIEmbeddings", - "type": "OpenAIEmbeddings", - "baseClasses": ["OpenAIEmbeddings", "Embeddings"], - "category": "Embeddings", - "description": "OpenAI API to generate embeddings for a given text", - "inputParams": [ - { - "label": "Connect Credential", - "name": "credential", - "type": "credential", - "credentialNames": ["openAIApi"], - "id": "openAIEmbeddings_2-input-credential-credential" - }, - { - "label": "Model Name", - "name": "modelName", - "type": "asyncOptions", - "loadMethod": "listModels", - "default": "text-embedding-ada-002", - "id": "openAIEmbeddings_2-input-modelName-asyncOptions" - }, - { - "label": "Strip New Lines", - "name": "stripNewLines", - "type": "boolean", - "optional": true, - "additionalParams": true, - "id": "openAIEmbeddings_2-input-stripNewLines-boolean" - }, - { - "label": "Batch Size", - "name": "batchSize", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "openAIEmbeddings_2-input-batchSize-number" - }, - { - "label": "Timeout", - "name": "timeout", - "type": 
"number", - "optional": true, - "additionalParams": true, - "id": "openAIEmbeddings_2-input-timeout-number" - }, - { - "label": "BasePath", - "name": "basepath", - "type": "string", - "optional": true, - "additionalParams": true, - "id": "openAIEmbeddings_2-input-basepath-string" - }, - { - "label": "Dimensions", - "name": "dimensions", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "openAIEmbeddings_2-input-dimensions-number" - } - ], - "inputAnchors": [], - "inputs": { - "modelName": "text-embedding-ada-002", - "stripNewLines": "", - "batchSize": "", - "timeout": "", - "basepath": "", - "dimensions": "" - }, - "outputAnchors": [ - { - "id": "openAIEmbeddings_2-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", - "name": "openAIEmbeddings", - "label": "OpenAIEmbeddings", - "description": "OpenAI API to generate embeddings for a given text", - "type": "OpenAIEmbeddings | Embeddings" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 126.74109446437771, - "y": 542.6301053870723 - }, - "dragging": false - }, - { - "width": 300, - "height": 670, - "id": "chatOpenAI_0", - "position": { - "x": 519.798956186608, - "y": -1601.3893918503904 - }, - "type": "customNode", - "data": { - "id": "chatOpenAI_0", - "label": "ChatOpenAI", - "version": 6, - "name": "chatOpenAI", - "type": "ChatOpenAI", - "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel", "Runnable"], - "category": "Chat Models", - "description": "Wrapper around OpenAI large language models that use the Chat endpoint", - "inputParams": [ - { - "label": "Connect Credential", - "name": "credential", - "type": "credential", - "credentialNames": ["openAIApi"], - "id": "chatOpenAI_0-input-credential-credential" - }, - { - "label": "Model Name", - "name": "modelName", - "type": "asyncOptions", - "loadMethod": "listModels", - "default": "gpt-3.5-turbo", - "id": "chatOpenAI_0-input-modelName-options" - }, - { - "label": 
"Temperature", - "name": "temperature", - "type": "number", - "step": 0.1, - "default": 0.9, - "optional": true, - "id": "chatOpenAI_0-input-temperature-number" - }, - { - "label": "Max Tokens", - "name": "maxTokens", - "type": "number", - "step": 1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-maxTokens-number" - }, - { - "label": "Top Probability", - "name": "topP", - "type": "number", - "step": 0.1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-topP-number" - }, - { - "label": "Frequency Penalty", - "name": "frequencyPenalty", - "type": "number", - "step": 0.1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-frequencyPenalty-number" - }, - { - "label": "Presence Penalty", - "name": "presencePenalty", - "type": "number", - "step": 0.1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-presencePenalty-number" - }, - { - "label": "Timeout", - "name": "timeout", - "type": "number", - "step": 1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-timeout-number" - }, - { - "label": "BasePath", - "name": "basepath", - "type": "string", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-basepath-string" - }, - { - "label": "BaseOptions", - "name": "baseOptions", - "type": "json", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-baseOptions-json" - }, - { - "label": "Allow Image Uploads", - "name": "allowImageUploads", - "type": "boolean", - "description": "Automatically uses gpt-4-vision-preview when image is being uploaded from chat. 
Only works with LLMChain, Conversation Chain, ReAct Agent, and Conversational Agent", - "default": false, - "optional": true, - "id": "chatOpenAI_0-input-allowImageUploads-boolean" - }, - { - "label": "Image Resolution", - "description": "This parameter controls the resolution in which the model views the image.", - "name": "imageResolution", - "type": "options", - "options": [ - { - "label": "Low", - "name": "low" - }, - { - "label": "High", - "name": "high" - }, - { - "label": "Auto", - "name": "auto" - } - ], - "default": "low", - "optional": false, - "additionalParams": true, - "id": "chatOpenAI_0-input-imageResolution-options" - } - ], - "inputAnchors": [ - { - "label": "Cache", - "name": "cache", - "type": "BaseCache", - "optional": true, - "id": "chatOpenAI_0-input-cache-BaseCache" - } - ], - "inputs": { - "cache": "", - "modelName": "gpt-3.5-turbo-16k", - "temperature": 0.9, - "maxTokens": "", - "topP": "", - "frequencyPenalty": "", - "presencePenalty": "", - "timeout": "", - "basepath": "", - "baseOptions": "", - "allowImageUploads": true, - "imageResolution": "low" - }, - "outputAnchors": [ - { - "id": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", - "name": "chatOpenAI", - "label": "ChatOpenAI", - "type": "ChatOpenAI | BaseChatModel | BaseLanguageModel | Runnable" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 519.798956186608, - "y": -1601.3893918503904 - }, - "dragging": false - }, - { - "width": 300, - "height": 652, - "id": "redis_0", - "position": { - "x": 517.9599892124863, - "y": -892.797784079465 - }, - "type": "customNode", - "data": { - "id": "redis_0", - "label": "Redis", - "version": 1, - "name": "redis", - "type": "Redis", - "baseClasses": ["Redis", "VectorStoreRetriever", "BaseRetriever"], - "category": "Vector Stores", - "description": "Upsert embedded data and perform similarity search upon query using Redis, an open source, in-memory data 
structure store", - "inputParams": [ - { - "label": "Connect Credential", - "name": "credential", - "type": "credential", - "credentialNames": ["redisCacheUrlApi", "redisCacheApi"], - "id": "redis_0-input-credential-credential" - }, - { - "label": "Index Name", - "name": "indexName", - "placeholder": "", - "type": "string", - "id": "redis_0-input-indexName-string" - }, - { - "label": "Replace Index on Upsert", - "name": "replaceIndex", - "description": "Selecting this option will delete the existing index and recreate a new one when upserting", - "default": false, - "type": "boolean", - "id": "redis_0-input-replaceIndex-boolean" - }, - { - "label": "Content Field", - "name": "contentKey", - "description": "Name of the field (column) that contains the actual content", - "type": "string", - "default": "content", - "additionalParams": true, - "optional": true, - "id": "redis_0-input-contentKey-string" - }, - { - "label": "Metadata Field", - "name": "metadataKey", - "description": "Name of the field (column) that contains the metadata of the document", - "type": "string", - "default": "metadata", - "additionalParams": true, - "optional": true, - "id": "redis_0-input-metadataKey-string" - }, - { - "label": "Vector Field", - "name": "vectorKey", - "description": "Name of the field (column) that contains the vector", - "type": "string", - "default": "content_vector", - "additionalParams": true, - "optional": true, - "id": "redis_0-input-vectorKey-string" - }, - { - "label": "Top K", - "name": "topK", - "description": "Number of top results to fetch. 
Default to 4", - "placeholder": "4", - "type": "number", - "additionalParams": true, - "optional": true, - "id": "redis_0-input-topK-number" - } - ], - "inputAnchors": [ - { - "label": "Document", - "name": "document", - "type": "Document", - "list": true, - "optional": true, - "id": "redis_0-input-document-Document" - }, - { - "label": "Embeddings", - "name": "embeddings", - "type": "Embeddings", - "id": "redis_0-input-embeddings-Embeddings" - } - ], - "inputs": { - "document": ["{{plainText_0.data.instance}}"], - "embeddings": "{{openAIEmbeddings_1.data.instance}}", - "indexName": "redis-1234", - "replaceIndex": true, - "contentKey": "content", - "metadataKey": "metadata", - "vectorKey": "content_vector", - "topK": "" - }, - "outputAnchors": [ - { - "name": "output", - "label": "Output", - "type": "options", - "options": [ - { - "id": "redis_0-output-retriever-Redis|VectorStoreRetriever|BaseRetriever", - "name": "retriever", - "label": "Redis Retriever", - "type": "Redis | VectorStoreRetriever | BaseRetriever" - }, - { - "id": "redis_0-output-vectorStore-Redis|VectorStore", - "name": "vectorStore", - "label": "Redis Vector Store", - "type": "Redis | VectorStore" - } - ], - "default": "retriever" - } - ], - "outputs": { - "output": "retriever" - }, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 517.9599892124863, - "y": -892.797784079465 - }, - "dragging": false - }, - { - "width": 300, - "height": 459, - "id": "faiss_0", - "position": { - "x": 537.5298173812396, - "y": 545.504276022315 - }, - "type": "customNode", - "data": { - "id": "faiss_0", - "label": "Faiss", - "version": 1, - "name": "faiss", - "type": "Faiss", - "baseClasses": ["Faiss", "VectorStoreRetriever", "BaseRetriever"], - "category": "Vector Stores", - "description": "Upsert embedded data and perform similarity search upon query using Faiss library from Meta", - "inputParams": [ - { - "label": "Base Path to load", - "name": "basePath", - "description": "Path to load 
faiss.index file", - "placeholder": "C:\\Users\\User\\Desktop", - "type": "string", - "id": "faiss_0-input-basePath-string" - }, - { - "label": "Top K", - "name": "topK", - "description": "Number of top results to fetch. Default to 4", - "placeholder": "4", - "type": "number", - "additionalParams": true, - "optional": true, - "id": "faiss_0-input-topK-number" - } - ], - "inputAnchors": [ - { - "label": "Document", - "name": "document", - "type": "Document", - "list": true, - "optional": true, - "id": "faiss_0-input-document-Document" - }, - { - "label": "Embeddings", - "name": "embeddings", - "type": "Embeddings", - "id": "faiss_0-input-embeddings-Embeddings" - } - ], - "inputs": { - "document": ["{{plainText_1.data.instance}}"], - "embeddings": "{{openAIEmbeddings_2.data.instance}}", - "basePath": "C:\\Users\\user\\yourpath", - "topK": "" - }, - "outputAnchors": [ - { - "name": "output", - "label": "Output", - "type": "options", - "options": [ - { - "id": "faiss_0-output-retriever-Faiss|VectorStoreRetriever|BaseRetriever", - "name": "retriever", - "label": "Faiss Retriever", - "type": "Faiss | VectorStoreRetriever | BaseRetriever" - }, - { - "id": "faiss_0-output-vectorStore-Faiss|SaveableVectorStore|VectorStore", - "name": "vectorStore", - "label": "Faiss Vector Store", - "type": "Faiss | SaveableVectorStore | VectorStore" - } - ], - "default": "retriever" - } - ], - "outputs": { - "output": "retriever" - }, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 537.5298173812396, - "y": 545.504276022315 - }, - "dragging": false - }, - { - "width": 300, - "height": 487, - "id": "plainText_0", - "position": { - "x": 93.6260931892966, - "y": -1209.0760064103088 - }, - "type": "customNode", - "data": { - "id": "plainText_0", - "label": "Plain Text", - "version": 2, - "name": "plainText", - "type": "Document", - "baseClasses": ["Document"], - "category": "Document Loaders", - "description": "Load data from plain text", - "inputParams": [ - { - 
"label": "Text", - "name": "text", - "type": "string", - "rows": 4, - "placeholder": "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua...", - "id": "plainText_0-input-text-string" - }, - { - "label": "Metadata", - "name": "metadata", - "type": "json", - "optional": true, - "additionalParams": true, - "id": "plainText_0-input-metadata-json" - } - ], - "inputAnchors": [ - { - "label": "Text Splitter", - "name": "textSplitter", - "type": "TextSplitter", - "optional": true, - "id": "plainText_0-input-textSplitter-TextSplitter" - } - ], - "inputs": { - "text": "AI-generated content refers to text, images, videos, or other media produced by artificial intelligence algorithms. It leverages deep learning and natural language processing to create human-like content autonomously. AI-generated content has diverse applications, from automated customer support chatbots and personalized marketing to creative writing and art generation. While it offers efficiency and scalability, it also raises concerns about ethics, authenticity, and potential misuse. 
Striking a balance between harnessing its potential for productivity and addressing its ethical implications is crucial as AI-generated content continues to evolve and reshape industries.", - "textSplitter": "", - "metadata": "" - }, - "outputAnchors": [ - { - "name": "output", - "label": "Output", - "type": "options", - "options": [ - { - "id": "plainText_0-output-document-Document|json", - "name": "document", - "label": "Document", - "type": "Document | json" - }, - { - "id": "plainText_0-output-text-string|json", - "name": "text", - "label": "Text", - "type": "string | json" - } - ], - "default": "document" - } - ], - "outputs": { - "output": "document" - }, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 93.6260931892966, - "y": -1209.0760064103088 - }, - "dragging": false - }, - { - "width": 300, - "height": 670, - "id": "chatOpenAI_1", - "position": { - "x": 533.0416474070086, - "y": -168.63117374104695 - }, - "type": "customNode", - "data": { - "id": "chatOpenAI_1", - "label": "ChatOpenAI", - "version": 6, - "name": "chatOpenAI", - "type": "ChatOpenAI", - "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel", "Runnable"], - "category": "Chat Models", - "description": "Wrapper around OpenAI large language models that use the Chat endpoint", - "inputParams": [ - { - "label": "Connect Credential", - "name": "credential", - "type": "credential", - "credentialNames": ["openAIApi"], - "id": "chatOpenAI_1-input-credential-credential" - }, - { - "label": "Model Name", - "name": "modelName", - "type": "asyncOptions", - "loadMethod": "listModels", - "default": "gpt-3.5-turbo", - "id": "chatOpenAI_1-input-modelName-options" - }, - { - "label": "Temperature", - "name": "temperature", - "type": "number", - "step": 0.1, - "default": 0.9, - "optional": true, - "id": "chatOpenAI_1-input-temperature-number" - }, - { - "label": "Max Tokens", - "name": "maxTokens", - "type": "number", - "step": 1, - "optional": true, - 
"additionalParams": true, - "id": "chatOpenAI_1-input-maxTokens-number" - }, - { - "label": "Top Probability", - "name": "topP", - "type": "number", - "step": 0.1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_1-input-topP-number" - }, - { - "label": "Frequency Penalty", - "name": "frequencyPenalty", - "type": "number", - "step": 0.1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_1-input-frequencyPenalty-number" - }, - { - "label": "Presence Penalty", - "name": "presencePenalty", - "type": "number", - "step": 0.1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_1-input-presencePenalty-number" - }, - { - "label": "Timeout", - "name": "timeout", - "type": "number", - "step": 1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_1-input-timeout-number" - }, - { - "label": "BasePath", - "name": "basepath", - "type": "string", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_1-input-basepath-string" - }, - { - "label": "BaseOptions", - "name": "baseOptions", - "type": "json", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_1-input-baseOptions-json" - }, - { - "label": "Allow Image Uploads", - "name": "allowImageUploads", - "type": "boolean", - "description": "Automatically uses gpt-4-vision-preview when image is being uploaded from chat. 
Only works with LLMChain, Conversation Chain, ReAct Agent, and Conversational Agent", - "default": false, - "optional": true, - "id": "chatOpenAI_1-input-allowImageUploads-boolean" - }, - { - "label": "Image Resolution", - "description": "This parameter controls the resolution in which the model views the image.", - "name": "imageResolution", - "type": "options", - "options": [ - { - "label": "Low", - "name": "low" - }, - { - "label": "High", - "name": "high" - }, - { - "label": "Auto", - "name": "auto" - } - ], - "default": "low", - "optional": false, - "additionalParams": true, - "id": "chatOpenAI_1-input-imageResolution-options" - } - ], - "inputAnchors": [ - { - "label": "Cache", - "name": "cache", - "type": "BaseCache", - "optional": true, - "id": "chatOpenAI_1-input-cache-BaseCache" - } - ], - "inputs": { - "cache": "", - "modelName": "gpt-3.5-turbo-16k", - "temperature": 0.9, - "maxTokens": "", - "topP": "", - "frequencyPenalty": "", - "presencePenalty": "", - "timeout": "", - "basepath": "", - "baseOptions": "", - "allowImageUploads": true, - "imageResolution": "low" - }, - "outputAnchors": [ - { - "id": "chatOpenAI_1-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", - "name": "chatOpenAI", - "label": "ChatOpenAI", - "type": "ChatOpenAI | BaseChatModel | BaseLanguageModel | Runnable" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 533.0416474070086, - "y": -168.63117374104695 - }, - "dragging": false - }, - { - "width": 300, - "height": 253, - "id": "bufferMemory_0", - "position": { - "x": 2047.6821632337533, - "y": 429.48576006102945 - }, - "type": "customNode", - "data": { - "id": "bufferMemory_0", - "label": "Buffer Memory", - "version": 2, - "name": "bufferMemory", - "type": "BufferMemory", - "baseClasses": ["BufferMemory", "BaseChatMemory", "BaseMemory"], - "category": "Memory", - "description": "Retrieve chat messages stored in database", - "inputParams": [ - { - "label": 
"Session Id", - "name": "sessionId", - "type": "string", - "description": "If not specified, a random id will be used. Learn more", - "default": "", - "additionalParams": true, - "optional": true, - "id": "bufferMemory_0-input-sessionId-string" - }, - { - "label": "Memory Key", - "name": "memoryKey", - "type": "string", - "default": "chat_history", - "additionalParams": true, - "id": "bufferMemory_0-input-memoryKey-string" - } - ], - "inputAnchors": [], - "inputs": { - "sessionId": "", - "memoryKey": "chat_history" - }, - "outputAnchors": [ - { - "id": "bufferMemory_0-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory", - "name": "bufferMemory", - "label": "BufferMemory", - "type": "BufferMemory | BaseChatMemory | BaseMemory" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 2047.6821632337533, - "y": 429.48576006102945 - }, - "dragging": false - }, - { - "width": 300, - "height": 487, - "id": "plainText_1", - "position": { - "x": 117.23894449422778, - "y": 23.24339894687961 - }, - "type": "customNode", - "data": { - "id": "plainText_1", - "label": "Plain Text", - "version": 2, - "name": "plainText", - "type": "Document", - "baseClasses": ["Document"], - "category": "Document Loaders", - "description": "Load data from plain text", - "inputParams": [ - { - "label": "Text", - "name": "text", - "type": "string", - "rows": 4, - "placeholder": "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua...", - "id": "plainText_1-input-text-string" - }, - { - "label": "Metadata", - "name": "metadata", - "type": "json", - "optional": true, - "additionalParams": true, - "id": "plainText_1-input-metadata-json" - } - ], - "inputAnchors": [ - { - "label": "Text Splitter", - "name": "textSplitter", - "type": "TextSplitter", - "optional": true, - "id": "plainText_1-input-textSplitter-TextSplitter" - } - ], - "inputs": { - "text": "Madam Speaker, Madam Vice 
President, our First Lady and Second Gentleman. Members of Congress and the Cabinet. Justices of the Supreme Court. My fellow Americans. \n\nLast year COVID-19 kept us apart. This year we are finally together again. \n\nTonight, we meet as Democrats Republicans and Independents. But most importantly as Americans. \n\nWith a duty to one another to the American people to the Constitution. \n\nAnd with an unwavering resolve that freedom will always triumph over tyranny. \n\nSix days ago, Russia’s Vladimir Putin sought to shake the foundations of the free world thinking he could make it bend to his menacing ways. But he badly miscalculated. \n\nHe thought he could roll into Ukraine and the world would roll over. Instead he met a wall of strength he never imagined. \n\nHe met the Ukrainian people. \n\nFrom President Zelenskyy to every Ukrainian, their fearlessness, their courage, their determination, inspires the world. \n\nGroups of citizens blocking tanks with their bodies. Everyone from students to retirees teachers turned soldiers defending their homeland. \n\nIn this struggle as President Zelenskyy said in his speech to the European Parliament “Light will win over darkness.” The Ukrainian Ambassador to the United States is here tonight. \n\nLet each of us here tonight in this Chamber send an unmistakable signal to Ukraine and to the world. \n\nPlease rise if you are able and show that, Yes, we the United States of America stand with the Ukrainian people. \n\nThroughout our history we’ve learned this lesson when dictators do not pay a price for their aggression they cause more chaos. \n\nThey keep moving. \n\nAnd the costs and the threats to America and the world keep rising. \n\nThat’s why the NATO Alliance was created to secure peace and stability in Europe after World War 2. \n\nThe United States is a member along with 29 other nations. \n\nIt matters. American diplomacy matters. American resolve matters. 
\n\nPutin’s latest attack on Ukraine was premeditated and unprovoked. \n\nHe rejected repeated efforts at diplomacy. \n\nHe thought the West and NATO wouldn’t respond. And he thought he could divide us at home. Putin was wrong. We were ready. Here is what we did. \n\nWe prepared extensively and carefully. \n\nWe spent months building a coalition of other freedom-loving nations from Europe and the Americas to Asia and Africa to confront Putin. \n\nI spent countless hours unifying our European allies. We shared with the world in advance what we knew Putin was planning and precisely how he would try to falsely justify his aggression. \n\nWe countered Russia’s lies with truth. \n\nAnd now that he has acted the free world is holding him accountable. \n\nAlong with twenty-seven members of the European Union including France, Germany, Italy, as well as countries like the United Kingdom, Canada, Japan, Korea, Australia, New Zealand, and many others, even Switzerland. \n\nWe are inflicting pain on Russia and supporting the people of Ukraine. Putin is now isolated from the world more than ever. \n\nTogether with our allies –we are right now enforcing powerful economic sanctions. \n\nWe are cutting off Russia’s largest banks from the international financial system. \n\nPreventing Russia’s central bank from defending the Russian Ruble making Putin’s $630 Billion “war fund” worthless. \n\nWe are choking off Russia’s access to technology that will sap its economic strength and weaken its military for years to come. \n\nTonight I say to the Russian oligarchs and corrupt leaders who have bilked billions of dollars off this violent regime no more. \n\nThe U.S. Department of Justice is assembling a dedicated task force to go after the crimes of Russian oligarchs. \n\nWe are joining with our European allies to find and seize your yachts your luxury apartments your private jets. We are coming for your ill-begotten gains. 
\n\nAnd tonight I am announcing that we will join our allies in closing off American air space to all Russian flights – further isolating Russia – and adding an additional squeeze –on their economy. The Ruble has lost 30% of its value. \n\nThe Russian stock market has lost 40% of its value and trading remains suspended. Russia’s economy is reeling and Putin alone is to blame. \n\nTogether with our allies we are providing support to the Ukrainians in their fight for freedom. Military assistance. Economic assistance. Humanitarian assistance. \n\nWe are giving more than $1 Billion in direct assistance to Ukraine. \n\nAnd we will continue to aid the Ukrainian people as they defend their country and to help ease their suffering. \n\nLet me be clear, our forces are not engaged and will not engage in conflict with Russian forces in Ukraine. \n\nOur forces are not going to Europe to fight in Ukraine, but to defend our NATO Allies – in the event that Putin decides to keep moving west. \n\nFor that purpose we’ve mobilized American ground forces, air squadrons, and ship deployments to protect NATO countries including Poland, Romania, Latvia, Lithuania, and Estonia. \n\nAs I have made crystal clear the United States and our Allies will defend every inch of territory of NATO countries with the full force of our collective power. \n\nAnd we remain clear-eyed. The Ukrainians are fighting back with pure courage. But the next few days weeks, months, will be hard on them. \n\nPutin has unleashed violence and chaos. But while he may make gains on the battlefield – he will pay a continuing high price over the long run. \n\nAnd a proud Ukrainian people, who have known 30 years of independence, have repeatedly shown that they will not tolerate anyone who tries to take their country backwards. \n\nTo all Americans, I will be honest with you, as I’ve always promised. A Russian dictator, invading a foreign country, has costs around the world. 
\n\nAnd I’m taking robust action to make sure the pain of our sanctions is targeted at Russia’s economy. And I will use every tool at our disposal to protect American businesses and consumers. \n\nTonight, I can announce that the United States has worked with 30 other countries to release 60 Million barrels of oil from reserves around the world. \n\nAmerica will lead that effort, releasing 30 Million barrels from our own Strategic Petroleum Reserve. And we stand ready to do more if necessary, unified with our allies. \n\nThese steps will help blunt gas prices here at home. And I know the news about what’s happening can seem alarming. \n\nBut I want you to know that we are going to be okay. \n\nWhen the history of this era is written Putin’s war on Ukraine will have left Russia weaker and the rest of the world stronger. \n\nWhile it shouldn’t have taken something so terrible for people around the world to see what’s at stake now everyone sees it clearly. \n\nWe see the unity among leaders of nations and a more unified Europe a more unified West. And we see unity among the people who are gathering in cities in large crowds around the world even in Russia to demonstrate their support for Ukraine. \n\nIn the battle between democracy and autocracy, democracies are rising to the moment, and the world is clearly choosing the side of peace and security. \n\nThis is a real test. It’s going to take time. So let us continue to draw inspiration from the iron will of the Ukrainian people. \n\nTo our fellow Ukrainian Americans who forge a deep bond that connects our two nations we stand with you. \n\nPutin may circle Kyiv with tanks, but he will never gain the hearts and souls of the Ukrainian people. \n\nHe will never extinguish their love of freedom. He will never weaken the resolve of the free world. \n\nWe meet tonight in an America that has lived through two of the hardest years this nation has ever faced. \n\nThe pandemic has been punishing. 
\n\nAnd so many families are living paycheck to paycheck, struggling to keep up with the rising cost of food, gas, housing, and so much more. \n\nI understand. \n\nI remember when my Dad had to leave our home in Scranton, Pennsylvania to find work. I grew up in a family where if the price of food went up, you felt it. \n\nThat’s why one of the first things I did as President was fight to pass the American Rescue Plan. \n\nBecause people were hurting. We needed to act, and we did. \n\nFew pieces of legislation have done more in a critical moment in our history to lift us out of crisis. \n\nIt fueled our efforts to vaccinate the nation and combat COVID-19. It delivered immediate economic relief for tens of millions of Americans. \n\nHelped put food on their table, keep a roof over their heads, and cut the cost of health insurance. \n\nAnd as my Dad used to say, it gave people a little breathing room. \n\nAnd unlike the $2 Trillion tax cut passed in the previous administration that benefitted the top 1% of Americans, the American Rescue Plan helped working people—and left no one behind. \n\nAnd it worked. It created jobs. Lots of jobs. \n\nIn fact—our economy created over 6.5 Million new jobs just last year, more jobs created in one year \nthan ever before in the history of America. \n\nOur economy grew at a rate of 5.7% last year, the strongest growth in nearly 40 years, the first step in bringing fundamental change to an economy that hasn’t worked for the working people of this nation for too long. \n\nFor the past 40 years we were told that if we gave tax breaks to those at the very top, the benefits would trickle down to everyone else. \n\nBut that trickle-down theory led to weaker economic growth, lower wages, bigger deficits, and the widest gap between those at the top and everyone else in nearly a century. \n\nVice President Harris and I ran for office with a new economic vision for America. \n\nInvest in America. Educate Americans. Grow the workforce. 
Build the economy from the bottom up \nand the middle out, not from the top down. \n\nBecause we know that when the middle class grows, the poor have a ladder up and the wealthy do very well. \n\nAmerica used to have the best roads, bridges, and airports on Earth. \n\nNow our infrastructure is ranked 13th in the world. \n\nWe won’t be able to compete for the jobs of the 21st Century if we don’t fix that. \n\nThat’s why it was so important to pass the Bipartisan Infrastructure Law—the most sweeping investment to rebuild America in history. \n\nThis was a bipartisan effort, and I want to thank the members of both parties who worked to make it happen. \n\nWe’re done talking about infrastructure weeks. \n\nWe’re going to have an infrastructure decade. \n\nIt is going to transform America and put us on a path to win the economic competition of the 21st Century that we face with the rest of the world—particularly with China. \n\nAs I’ve told Xi Jinping, it is never a good bet to bet against the American people. \n\nWe’ll create good jobs for millions of Americans, modernizing roads, airports, ports, and waterways all across America. \n\nAnd we’ll do it all to withstand the devastating effects of the climate crisis and promote environmental justice. \n\nWe’ll build a national network of 500,000 electric vehicle charging stations, begin to replace poisonous lead pipes—so every child—and every American—has clean water to drink at home and at school, provide affordable high-speed internet for every American—urban, suburban, rural, and tribal communities. \n\n4,000 projects have already been announced. \n\nAnd tonight, I’m announcing that this year we will start fixing over 65,000 miles of highway and 1,500 bridges in disrepair. \n\nWhen we use taxpayer dollars to rebuild America – we are going to Buy American: buy American products to support American jobs. 
\n\nThe federal government spends about $600 Billion a year to keep the country safe and secure. \n\nThere’s been a law on the books for almost a century \nto make sure taxpayers’ dollars support American jobs and businesses. \n\nEvery Administration says they’ll do it, but we are actually doing it. \n\nWe will buy American to make sure everything from the deck of an aircraft carrier to the steel on highway guardrails are made in America. \n\nBut to compete for the best jobs of the future, we also need to level the playing field with China and other competitors. \n\nThat’s why it is so important to pass the Bipartisan Innovation Act sitting in Congress that will make record investments in emerging technologies and American manufacturing. \n\nLet me give you one example of why it’s so important to pass it. \n\nIf you travel 20 miles east of Columbus, Ohio, you’ll find 1,000 empty acres of land. \n\nIt won’t look like much, but if you stop and look closely, you’ll see a “Field of dreams,” the ground on which America’s future will be built. \n\nThis is where Intel, the American company that helped build Silicon Valley, is going to build its $20 billion semiconductor “mega site”. \n\nUp to eight state-of-the-art factories in one place. 10,000 new good-paying jobs. \n\nSome of the most sophisticated manufacturing in the world to make computer chips the size of a fingertip that power the world and our everyday lives. \n\nSmartphones. The Internet. Technology we have yet to invent. \n\nBut that’s just the beginning. \n\nIntel’s CEO, Pat Gelsinger, who is here tonight, told me they are ready to increase their investment from \n$20 billion to $100 billion. \n\nThat would be one of the biggest investments in manufacturing in American history. \n\nAnd all they’re waiting for is for you to pass this bill. \n\nSo let’s not wait any longer. Send it to my desk. I’ll sign it. \n\nAnd we will really take off. \n\nAnd Intel is not alone. 
\n\nThere’s something happening in America. \n\nJust look around and you’ll see an amazing story. \n\nThe rebirth of the pride that comes from stamping products “Made In America.” The revitalization of American manufacturing. \n\nCompanies are choosing to build new factories here, when just a few years ago, they would have built them overseas. \n\nThat’s what is happening. Ford is investing $11 billion to build electric vehicles, creating 11,000 jobs across the country. \n\nGM is making the largest investment in its history—$7 billion to build electric vehicles, creating 4,000 jobs in Michigan. \n\nAll told, we created 369,000 new manufacturing jobs in America just last year. \n\nPowered by people I’ve met like JoJo Burgess, from generations of union steelworkers from Pittsburgh, who’s here with us tonight. \n\nAs Ohio Senator Sherrod Brown says, “It’s time to bury the label “Rust Belt.” \n\nIt’s time. \n\nBut with all the bright spots in our economy, record job growth and higher wages, too many families are struggling to keep up with the bills. \n\nInflation is robbing them of the gains they might otherwise feel. \n\nI get it. That’s why my top priority is getting prices under control. \n\nLook, our economy roared back faster than most predicted, but the pandemic meant that businesses had a hard time hiring enough workers to keep up production in their factories. \n\nThe pandemic also disrupted global supply chains. \n\nWhen factories close, it takes longer to make goods and get them from the warehouse to the store, and prices go up. \n\nLook at cars. \n\nLast year, there weren’t enough semiconductors to make all the cars that people wanted to buy. \n\nAnd guess what, prices of automobiles went up. \n\nSo—we have a choice. \n\nOne way to fight inflation is to drive down wages and make Americans poorer. \n\nI have a better plan to fight inflation. \n\nLower your costs, not your wages. \n\nMake more cars and semiconductors in America. 
\n\nMore infrastructure and innovation in America. \n\nMore goods moving faster and cheaper in America. \n\nMore jobs where you can earn a good living in America. \n\nAnd instead of relying on foreign supply chains, let’s make it in America. \n\nEconomists call it “increasing the productive capacity of our economy.” \n\nI call it building a better America. \n\nMy plan to fight inflation will lower your costs and lower the deficit. \n\n17 Nobel laureates in economics say my plan will ease long-term inflationary pressures. Top business leaders and most Americans support my plan. And here’s the plan: \n\nFirst – cut the cost of prescription drugs. Just look at insulin. One in ten Americans has diabetes. In Virginia, I met a 13-year-old boy named Joshua Davis. \n\nHe and his Dad both have Type 1 diabetes, which means they need insulin every day. Insulin costs about $10 a vial to make. \n\nBut drug companies charge families like Joshua and his Dad up to 30 times more. I spoke with Joshua’s mom. \n\nImagine what it’s like to look at your child who needs insulin and have no idea how you’re going to pay for it. \n\nWhat it does to your dignity, your ability to look your child in the eye, to be the parent you expect to be. \n\nJoshua is here with us tonight. Yesterday was his birthday. Happy birthday, buddy. \n\nFor Joshua, and for the 200,000 other young people with Type 1 diabetes, let’s cap the cost of insulin at $35 a month so everyone can afford it. \n\nDrug companies will still do very well. And while we’re at it let Medicare negotiate lower prices for prescription drugs, like the VA already does. \n\nLook, the American Rescue Plan is helping millions of families on Affordable Care Act plans save $2,400 a year on their health care premiums. Let’s close the coverage gap and make those savings permanent. \n\nSecond – cut energy costs for families an average of $500 a year by combatting climate change. 
\n\nLet’s provide investments and tax credits to weatherize your homes and businesses to be energy efficient and you get a tax credit; double America’s clean energy production in solar, wind, and so much more; lower the price of electric vehicles, saving you another $80 a month because you’ll never have to pay at the gas pump again. \n\nThird – cut the cost of child care. Many families pay up to $14,000 a year for child care per child. \n\nMiddle-class and working families shouldn’t have to pay more than 7% of their income for care of young children. \n\nMy plan will cut the cost in half for most families and help parents, including millions of women, who left the workforce during the pandemic because they couldn’t afford child care, to be able to get back to work. \n\nMy plan doesn’t stop there. It also includes home and long-term care. More affordable housing. And Pre-K for every 3- and 4-year-old. \n\nAll of these will lower costs. \n\nAnd under my plan, nobody earning less than $400,000 a year will pay an additional penny in new taxes. Nobody. \n\nThe one thing all Americans agree on is that the tax system is not fair. We have to fix it. \n\nI’m not looking to punish anyone. But let’s make sure corporations and the wealthiest Americans start paying their fair share. \n\nJust last year, 55 Fortune 500 corporations earned $40 billion in profits and paid zero dollars in federal income tax. \n\nThat’s simply not fair. That’s why I’ve proposed a 15% minimum tax rate for corporations. \n\nWe got more than 130 countries to agree on a global minimum tax rate so companies can’t get out of paying their taxes at home by shipping jobs and factories overseas. \n\nThat’s why I’ve proposed closing loopholes so the very wealthy don’t pay a lower tax rate than a teacher or a firefighter. \n\nSo that’s my plan. It will grow the economy and lower costs for families. \n\nSo what are we waiting for? Let’s get this done. 
And while you’re at it, confirm my nominees to the Federal Reserve, which plays a critical role in fighting inflation. \n\nMy plan will not only lower costs to give families a fair shot, it will lower the deficit. \n\nThe previous Administration not only ballooned the deficit with tax cuts for the very wealthy and corporations, it undermined the watchdogs whose job was to keep pandemic relief funds from being wasted. \n\nBut in my administration, the watchdogs have been welcomed back. \n\nWe’re going after the criminals who stole billions in relief money meant for small businesses and millions of Americans. \n\nAnd tonight, I’m announcing that the Justice Department will name a chief prosecutor for pandemic fraud. \n\nBy the end of this year, the deficit will be down to less than half what it was before I took office. \n\nThe only president ever to cut the deficit by more than one trillion dollars in a single year. \n\nLowering your costs also means demanding more competition. \n\nI’m a capitalist, but capitalism without competition isn’t capitalism. \n\nIt’s exploitation—and it drives up prices. \n\nWhen corporations don’t have to compete, their profits go up, your prices go up, and small businesses and family farmers and ranchers go under. \n\nWe see it happening with ocean carriers moving goods in and out of America. \n\nDuring the pandemic, these foreign-owned companies raised prices by as much as 1,000% and made record profits. \n\nTonight, I’m announcing a crackdown on these companies overcharging American businesses and consumers. \n\nAnd as Wall Street firms take over more nursing homes, quality in those homes has gone down and costs have gone up. \n\nThat ends on my watch. \n\nMedicare is going to set higher standards for nursing homes and make sure your loved ones get the care they deserve and expect. 
\n\nWeโ€™ll also cut costs and keep the economy going strong by giving workers a fair shot, provide more training and apprenticeships, hire them based on their skills not degrees. \n\nLetโ€™s pass the Paycheck Fairness Act and paid leave. \n\nRaise the minimum wage to $15 an hour and extend the Child Tax Credit, so no one has to raise a family in poverty. \n\nLetโ€™s increase Pell Grants and increase our historic support of HBCUs, and invest in what Jillโ€”our First Lady who teaches full-timeโ€”calls Americaโ€™s best-kept secret: community colleges. \n\nAnd letโ€™s pass the PRO Act when a majority of workers want to form a unionโ€”they shouldnโ€™t be stopped. \n\nWhen we invest in our workers, when we build the economy from the bottom up and the middle out together, we can do something we havenโ€™t done in a long time: build a better America. \n\nFor more than two years, COVID-19 has impacted every decision in our lives and the life of the nation. \n\nAnd I know youโ€™re tired, frustrated, and exhausted. \n\nBut I also know this. \n\nBecause of the progress weโ€™ve made, because of your resilience and the tools we have, tonight I can say \nwe are moving forward safely, back to more normal routines. \n\nWeโ€™ve reached a new moment in the fight against COVID-19, with severe cases down to a level not seen since last July. \n\nJust a few days ago, the Centers for Disease Control and Preventionโ€”the CDCโ€”issued new mask guidelines. \n\nUnder these new guidelines, most Americans in most of the country can now be mask free. \n\nAnd based on the projections, more of the country will reach that point across the next couple of weeks. \n\nThanks to the progress we have made this past year, COVID-19 need no longer control our lives. \n\nI know some are talking about โ€œliving with COVID-19โ€. Tonight โ€“ I say that we will never just accept living with COVID-19. \n\nWe will continue to combat the virus as we do other diseases. 
And because this is a virus that mutates and spreads, we will stay on guard. \n\nHere are four common sense steps as we move forward safely. \n\nFirst, stay protected with vaccines and treatments. We know how incredibly effective vaccines are. If youโ€™re vaccinated and boosted you have the highest degree of protection. \n\nWe will never give up on vaccinating more Americans. Now, I know parents with kids under 5 are eager to see a vaccine authorized for their children. \n\nThe scientists are working hard to get that done and weโ€™ll be ready with plenty of vaccines when they do. \n\nWeโ€™re also ready with anti-viral treatments. If you get COVID-19, the Pfizer pill reduces your chances of ending up in the hospital by 90%. \n\nWeโ€™ve ordered more of these pills than anyone in the world. And Pfizer is working overtime to get us 1 Million pills this month and more than double that next month. \n\nAnd weโ€™re launching the โ€œTest to Treatโ€ initiative so people can get tested at a pharmacy, and if theyโ€™re positive, receive antiviral pills on the spot at no cost. \n\nIf youโ€™re immunocompromised or have some other vulnerability, we have treatments and free high-quality masks. \n\nWeโ€™re leaving no one behind or ignoring anyoneโ€™s needs as we move forward. \n\nAnd on testing, we have made hundreds of millions of tests available for you to order for free. \n\nEven if you already ordered free tests tonight, I am announcing that you can order more from covidtests.gov starting next week. \n\nSecond โ€“ we must prepare for new variants. Over the past year, weโ€™ve gotten much better at detecting new variants. \n\nIf necessary, weโ€™ll be able to deploy new vaccines within 100 days instead of many more months or years. \n\nAnd, if Congress provides the funds we need, weโ€™ll have new stockpiles of tests, masks, and pills ready if needed. \n\nI cannot promise a new variant wonโ€™t come. 
But I can promise you weโ€™ll do everything within our power to be ready if it does. \n\nThird โ€“ we can end the shutdown of schools and businesses. We have the tools we need. \n\nItโ€™s time for Americans to get back to work and fill our great downtowns again. People working from home can feel safe to begin to return to the office. \n\nWeโ€™re doing that here in the federal government. The vast majority of federal workers will once again work in person. \n\nOur schools are open. Letโ€™s keep it that way. Our kids need to be in school. \n\nAnd with 75% of adult Americans fully vaccinated and hospitalizations down by 77%, most Americans can remove their masks, return to work, stay in the classroom, and move forward safely. \n\nWe achieved this because we provided free vaccines, treatments, tests, and masks. \n\nOf course, continuing this costs money. \n\nI will soon send Congress a request. \n\nThe vast majority of Americans have used these tools and may want to again, so I expect Congress to pass it quickly. \n\nFourth, we will continue vaccinating the world. \n\nWeโ€™ve sent 475 Million vaccine doses to 112 countries, more than any other nation. \n\nAnd we wonโ€™t stop. \n\nWe have lost so much to COVID-19. Time with one another. And worst of all, so much loss of life. \n\nLetโ€™s use this moment to reset. Letโ€™s stop looking at COVID-19 as a partisan dividing line and see it for what it is: A God-awful disease. \n\nLetโ€™s stop seeing each other as enemies, and start seeing each other for who we really are: Fellow Americans. \n\nWe canโ€™t change how divided weโ€™ve been. But we can change how we move forwardโ€”on COVID-19 and other issues we must face together. \n\nI recently visited the New York City Police Department days after the funerals of Officer Wilbert Mora and his partner, Officer Jason Rivera. \n\nThey were responding to a 9-1-1 call when a man shot and killed them with a stolen gun. \n\nOfficer Mora was 27 years old. \n\nOfficer Rivera was 22. 
\n\nBoth Dominican Americans whoโ€™d grown up on the same streets they later chose to patrol as police officers. \n\nI spoke with their families and told them that we are forever in debt for their sacrifice, and we will carry on their mission to restore the trust and safety every community deserves. \n\nIโ€™ve worked on these issues a long time. \n\nI know what works: Investing in crime preventionand community police officers whoโ€™ll walk the beat, whoโ€™ll know the neighborhood, and who can restore trust and safety. \n\nSo letโ€™s not abandon our streets. Or choose between safety and equal justice. \n\nLetโ€™s come together to protect our communities, restore trust, and hold law enforcement accountable. \n\nThatโ€™s why the Justice Department required body cameras, banned chokeholds, and restricted no-knock warrants for its officers. \n\nThatโ€™s why the American Rescue Plan provided $350 Billion that cities, states, and counties can use to hire more police and invest in proven strategies like community violence interruptionโ€”trusted messengers breaking the cycle of violence and trauma and giving young people hope. \n\nWe should all agree: The answer is not to Defund the police. The answer is to FUND the police with the resources and training they need to protect our communities. \n\nI ask Democrats and Republicans alike: Pass my budget and keep our neighborhoods safe. \n\nAnd I will keep doing everything in my power to crack down on gun trafficking and ghost guns you can buy online and make at homeโ€”they have no serial numbers and canโ€™t be traced. \n\nAnd I ask Congress to pass proven measures to reduce gun violence. Pass universal background checks. Why should anyone on a terrorist list be able to purchase a weapon? \n\nBan assault weapons and high-capacity magazines. \n\nRepeal the liability shield that makes gun manufacturers the only industry in America that canโ€™t be sued. \n\nThese laws donโ€™t infringe on the Second Amendment. They save lives. 
\n\nThe most fundamental right in America is the right to vote โ€“ and to have it counted. And itโ€™s under assault. \n\nIn state after state, new laws have been passed, not only to suppress the vote, but to subvert entire elections. \n\nWe cannot let this happen. \n\nTonight. I call on the Senate to: Pass the Freedom to Vote Act. Pass the John Lewis Voting Rights Act. And while youโ€™re at it, pass the Disclose Act so Americans can know who is funding our elections. \n\nTonight, Iโ€™d like to honor someone who has dedicated his life to serve this country: Justice Stephen Breyerโ€”an Army veteran, Constitutional scholar, and retiring Justice of the United States Supreme Court. Justice Breyer, thank you for your service. \n\nOne of the most serious constitutional responsibilities a President has is nominating someone to serve on the United States Supreme Court. \n\nAnd I did that 4 days ago, when I nominated Circuit Court of Appeals Judge Ketanji Brown Jackson. One of our nationโ€™s top legal minds, who will continue Justice Breyerโ€™s legacy of excellence. \n\nA former top litigator in private practice. A former federal public defender. And from a family of public school educators and police officers. A consensus builder. Since sheโ€™s been nominated, sheโ€™s received a broad range of supportโ€”from the Fraternal Order of Police to former judges appointed by Democrats and Republicans. \n\nAnd if we are to advance liberty and justice, we need to secure the Border and fix the immigration system. \n\nWe can do both. At our border, weโ€™ve installed new technology like cutting-edge scanners to better detect drug smuggling. \n\nWeโ€™ve set up joint patrols with Mexico and Guatemala to catch more human traffickers. \n\nWeโ€™re putting in place dedicated immigration judges so families fleeing persecution and violence can have their cases heard faster. 
\n\nWeโ€™re securing commitments and supporting partners in South and Central America to host more refugees and secure their own borders. \n\nWe can do all this while keeping lit the torch of liberty that has led generations of immigrants to this landโ€”my forefathers and so many of yours. \n\nProvide a pathway to citizenship for Dreamers, those on temporary status, farm workers, and essential workers. \n\nRevise our laws so businesses have the workers they need and families donโ€™t wait decades to reunite. \n\nItโ€™s not only the right thing to doโ€”itโ€™s the economically smart thing to do. \n\nThatโ€™s why immigration reform is supported by everyone from labor unions to religious leaders to the U.S. Chamber of Commerce. \n\nLetโ€™s get it done once and for all. \n\nAdvancing liberty and justice also requires protecting the rights of women. \n\nThe constitutional right affirmed in Roe v. Wadeโ€”standing precedent for half a centuryโ€”is under attack as never before. \n\nIf we want to go forwardโ€”not backwardโ€”we must protect access to health care. Preserve a womanโ€™s right to choose. And letโ€™s continue to advance maternal health care in America. \n\nAnd for our LGBTQ+ Americans, letโ€™s finally get the bipartisan Equality Act to my desk. The onslaught of state laws targeting transgender Americans and their families is wrong. \n\nAs I said last year, especially to our younger transgender Americans, I will always have your back as your President, so you can be yourself and reach your God-given potential. \n\nWhile it often appears that we never agree, that isnโ€™t true. I signed 80 bipartisan bills into law last year. From preventing government shutdowns to protecting Asian-Americans from still-too-common hate crimes to reforming military justice. \n\nAnd soon, weโ€™ll strengthen the Violence Against Women Act that I first wrote three decades ago. It is important for us to show the nation that we can come together and do big things. 
\n\nSo tonight Iโ€™m offering a Unity Agenda for the Nation. Four big things we can do together. \n\nFirst, beat the opioid epidemic. \n\nThere is so much we can do. Increase funding for prevention, treatment, harm reduction, and recovery. \n\nGet rid of outdated rules that stop doctors from prescribing treatments. And stop the flow of illicit drugs by working with state and local law enforcement to go after traffickers. \n\nIf youโ€™re suffering from addiction, know you are not alone. I believe in recovery, and I celebrate the 23 million Americans in recovery. \n\nSecond, letโ€™s take on mental health. Especially among our children, whose lives and education have been turned upside down. \n\nThe American Rescue Plan gave schools money to hire teachers and help students make up for lost learning. \n\nI urge every parent to make sure your school does just that. And we can all play a partโ€”sign up to be a tutor or a mentor. \n\nChildren were also struggling before the pandemic. Bullying, violence, trauma, and the harms of social media. \n\nAs Frances Haugen, who is here with us tonight, has shown, we must hold social media platforms accountable for the national experiment theyโ€™re conducting on our children for profit. \n\nItโ€™s time to strengthen privacy protections, ban targeted advertising to children, demand tech companies stop collecting personal data on our children. \n\nAnd letโ€™s get all Americans the mental health services they need. More people they can turn to for help, and full parity between physical and mental health care. \n\nThird, support our veterans. \n\nVeterans are the best of us. \n\nIโ€™ve always believed that we have a sacred obligation to equip all those we send to war and care for them and their families when they come home. \n\nMy administration is providing assistance with job training and housing, and now helping lower-income veterans get VA care debt-free. \n\nOur troops in Iraq and Afghanistan faced many dangers. 
\n\nOne was stationed at bases and breathing in toxic smoke from โ€œburn pitsโ€ that incinerated wastes of warโ€”medical and hazard material, jet fuel, and more. \n\nWhen they came home, many of the worldโ€™s fittest and best trained warriors were never the same. \n\nHeadaches. Numbness. Dizziness. \n\nA cancer that would put them in a flag-draped coffin. \n\nI know. \n\nOne of those soldiers was my son Major Beau Biden. \n\nWe donโ€™t know for sure if a burn pit was the cause of his brain cancer, or the diseases of so many of our troops. \n\nBut Iโ€™m committed to finding out everything we can. \n\nCommitted to military families like Danielle Robinson from Ohio. \n\nThe widow of Sergeant First Class Heath Robinson. \n\nHe was born a soldier. Army National Guard. Combat medic in Kosovo and Iraq. \n\nStationed near Baghdad, just yards from burn pits the size of football fields. \n\nHeathโ€™s widow Danielle is here with us tonight. They loved going to Ohio State football games. He loved building Legos with their daughter. \n\nBut cancer from prolonged exposure to burn pits ravaged Heathโ€™s lungs and body. \n\nDanielle says Heath was a fighter to the very end. \n\nHe didnโ€™t know how to stop fighting, and neither did she. \n\nThrough her pain she found purpose to demand we do better. \n\nTonight, Danielleโ€”we are. \n\nThe VA is pioneering new ways of linking toxic exposures to diseases, already helping more veterans get benefits. \n\nAnd tonight, Iโ€™m announcing weโ€™re expanding eligibility to veterans suffering from nine respiratory cancers. \n\nIโ€™m also calling on Congress: pass a law to make sure veterans devastated by toxic exposures in Iraq and Afghanistan finally get the benefits and comprehensive health care they deserve. \n\nAnd fourth, letโ€™s end cancer as we know it. \n\nThis is personal to me and Jill, to Kamala, and to so many of you. \n\nCancer is the #2 cause of death in Americaโ€“second only to heart disease. 
\n\nLast month, I announced our plan to supercharge \nthe Cancer Moonshot that President Obama asked me to lead six years ago. \n\nOur goal is to cut the cancer death rate by at least 50% over the next 25 years, turn more cancers from death sentences into treatable diseases. \n\nMore support for patients and families. \n\nTo get there, I call on Congress to fund ARPA-H, the Advanced Research Projects Agency for Health. \n\nItโ€™s based on DARPAโ€”the Defense Department project that led to the Internet, GPS, and so much more. \n\nARPA-H will have a singular purposeโ€”to drive breakthroughs in cancer, Alzheimerโ€™s, diabetes, and more. \n\nA unity agenda for the nation. \n\nWe can do this. \n\nMy fellow Americansโ€”tonight , we have gathered in a sacred spaceโ€”the citadel of our democracy. \n\nIn this Capitol, generation after generation, Americans have debated great questions amid great strife, and have done great things. \n\nWe have fought for freedom, expanded liberty, defeated totalitarianism and terror. \n\nAnd built the strongest, freest, and most prosperous nation the world has ever known. \n\nNow is the hour. \n\nOur moment of responsibility. \n\nOur test of resolve and conscience, of history itself. \n\nIt is in this moment that our character is formed. Our purpose is found. Our future is forged. \n\nWell I know this nation. \n\nWe will meet the test. \n\nTo protect freedom and liberty, to expand fairness and opportunity. \n\nWe will save democracy. \n\nAs hard as these times have been, I am more optimistic about America today than I have been my whole life. \n\nBecause I see the future that is within our grasp. \n\nBecause I know there is simply nothing beyond our capacity. \n\nWe are the only nation on Earth that has always turned every crisis we have faced into an opportunity. \n\nThe only nation that can be defined by a single word: possibilities. \n\nSo on this night, in our 245th year as a nation, I have come to report on the State of the Union. 
\n\nAnd my report is this: the State of the Union is strongโ€”because you, the American people, are strong. \n\nWe are stronger today than we were a year ago. \n\nAnd we will be stronger a year from now than we are today. \n\nNow is our moment to meet and overcome the challenges of our time. \n\nAnd we will, as one people. \n\nOne America. \n\nThe United States of America. \n\nMay God bless you all. May God protect our troops.", - "textSplitter": "{{recursiveCharacterTextSplitter_0.data.instance}}", - "metadata": "" - }, - "outputAnchors": [ - { - "name": "output", - "label": "Output", - "type": "options", - "options": [ - { - "id": "plainText_1-output-document-Document|json", - "name": "document", - "label": "Document", - "type": "Document | json" - }, - { - "id": "plainText_1-output-text-string|json", - "name": "text", - "label": "Text", - "type": "string | json" - } - ], - "default": "document" - } - ], - "outputs": { - "output": "document" - }, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 117.23894449422778, - "y": 23.24339894687961 - }, - "dragging": false - }, - { - "width": 300, - "height": 430, - "id": "recursiveCharacterTextSplitter_0", - "position": { - "x": -259.38954307457425, - "y": 75.96855802341503 - }, - "type": "customNode", - "data": { - "id": "recursiveCharacterTextSplitter_0", - "label": "Recursive Character Text Splitter", - "version": 2, - "name": "recursiveCharacterTextSplitter", - "type": "RecursiveCharacterTextSplitter", - "baseClasses": ["RecursiveCharacterTextSplitter", "TextSplitter", "BaseDocumentTransformer", "Runnable"], - "category": "Text Splitters", - "description": "Split documents recursively by different characters - starting with \"\\n\\n\", then \"\\n\", then \" \"", - "inputParams": [ - { - "label": "Chunk Size", - "name": "chunkSize", - "type": "number", - "default": 1000, - "optional": true, - "id": "recursiveCharacterTextSplitter_0-input-chunkSize-number" - }, - { - "label": "Chunk Overlap", 
- "name": "chunkOverlap", - "type": "number", - "optional": true, - "id": "recursiveCharacterTextSplitter_0-input-chunkOverlap-number" - }, - { - "label": "Custom Separators", - "name": "separators", - "type": "string", - "rows": 4, - "description": "Array of custom separators to determine when to split the text, will override the default separators", - "placeholder": "[\"|\", \"##\", \">\", \"-\"]", - "additionalParams": true, - "optional": true, - "id": "recursiveCharacterTextSplitter_0-input-separators-string" - } - ], - "inputAnchors": [], - "inputs": { - "chunkSize": 1000, - "chunkOverlap": "", - "separators": "" - }, - "outputAnchors": [ - { - "id": "recursiveCharacterTextSplitter_0-output-recursiveCharacterTextSplitter-RecursiveCharacterTextSplitter|TextSplitter|BaseDocumentTransformer|Runnable", - "name": "recursiveCharacterTextSplitter", - "label": "RecursiveCharacterTextSplitter", - "type": "RecursiveCharacterTextSplitter | TextSplitter | BaseDocumentTransformer | Runnable" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": -259.38954307457425, - "y": 75.96855802341503 - }, - "dragging": false - }, - { - "width": 300, - "height": 435, - "id": "conversationalAgent_0", - "position": { - "x": 2432.125364763489, - "y": -105.27942167533908 - }, - "type": "customNode", - "data": { - "id": "conversationalAgent_0", - "label": "Conversational Agent", - "version": 3, - "name": "conversationalAgent", - "type": "AgentExecutor", - "baseClasses": ["AgentExecutor", "BaseChain", "Runnable"], - "category": "Agents", - "description": "Conversational agent for a chat model. 
It will utilize chat specific prompts", - "inputParams": [ - { - "label": "System Message", - "name": "systemMessage", - "type": "string", - "rows": 4, - "default": "Assistant is a large language model trained by OpenAI.\n\nAssistant is designed to be able to assist with a wide range of tasks, from answering simple questions to providing in-depth explanations and discussions on a wide range of topics. As a language model, Assistant is able to generate human-like text based on the input it receives, allowing it to engage in natural-sounding conversations and provide responses that are coherent and relevant to the topic at hand.\n\nAssistant is constantly learning and improving, and its capabilities are constantly evolving. It is able to process and understand large amounts of text, and can use this knowledge to provide accurate and informative responses to a wide range of questions. Additionally, Assistant is able to generate its own text based on the input it receives, allowing it to engage in discussions and provide explanations and descriptions on a wide range of topics.\n\nOverall, Assistant is a powerful system that can help with a wide range of tasks and provide valuable insights and information on a wide range of topics. 
Whether you need help with a specific question or just want to have a conversation about a particular topic, Assistant is here to assist.", - "optional": true, - "additionalParams": true, - "id": "conversationalAgent_0-input-systemMessage-string" - }, - { - "label": "Max Iterations", - "name": "maxIterations", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "conversationalAgent_0-input-maxIterations-number" - } - ], - "inputAnchors": [ - { - "label": "Allowed Tools", - "name": "tools", - "type": "Tool", - "list": true, - "id": "conversationalAgent_0-input-tools-Tool" - }, - { - "label": "Chat Model", - "name": "model", - "type": "BaseChatModel", - "id": "conversationalAgent_0-input-model-BaseChatModel" - }, - { - "label": "Memory", - "name": "memory", - "type": "BaseChatMemory", - "id": "conversationalAgent_0-input-memory-BaseChatMemory" - }, - { - "label": "Input Moderation", - "description": "Detect text that could generate harmful output and prevent it from being sent to the language model", - "name": "inputModeration", - "type": "Moderation", - "optional": true, - "list": true, - "id": "conversationalAgent_0-input-inputModeration-Moderation" - } - ], - "inputs": { - "inputModeration": "", - "tools": ["{{chainTool_2.data.instance}}", "{{chainTool_3.data.instance}}"], - "model": "{{chatOllama_0.data.instance}}", - "memory": "{{bufferMemory_0.data.instance}}", - "systemMessage": "Assistant is a large language model trained by OpenAI.\n\nAssistant is designed to be able to assist with a wide range of tasks, from answering simple questions to providing in-depth explanations and discussions on a wide range of topics. As a language model, Assistant is able to generate human-like text based on the input it receives, allowing it to engage in natural-sounding conversations and provide responses that are coherent and relevant to the topic at hand.\n\nAssistant is constantly learning and improving, and its capabilities are constantly evolving. 
It is able to process and understand large amounts of text, and can use this knowledge to provide accurate and informative responses to a wide range of questions. Additionally, Assistant is able to generate its own text based on the input it receives, allowing it to engage in discussions and provide explanations and descriptions on a wide range of topics.\n\nOverall, Assistant is a powerful system that can help with a wide range of tasks and provide valuable insights and information on a wide range of topics. Whether you need help with a specific question or just want to have a conversation about a particular topic, Assistant is here to assist." - }, - "outputAnchors": [ - { - "id": "conversationalAgent_0-output-conversationalAgent-AgentExecutor|BaseChain|Runnable", - "name": "conversationalAgent", - "label": "AgentExecutor", - "type": "AgentExecutor | BaseChain | Runnable" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 2432.125364763489, - "y": -105.27942167533908 - }, - "dragging": false - }, - { - "id": "chatOllama_0", - "position": { - "x": 1662.4375746412504, - "y": 114.83248283616422 - }, - "type": "customNode", - "data": { - "id": "chatOllama_0", - "label": "ChatOllama", - "version": 2, - "name": "chatOllama", - "type": "ChatOllama", - "baseClasses": ["ChatOllama", "SimpleChatModel", "BaseChatModel", "BaseLanguageModel", "Runnable"], - "category": "Chat Models", - "description": "Chat completion using open-source LLM on Ollama", - "inputParams": [ - { - "label": "Base URL", - "name": "baseUrl", - "type": "string", - "default": "http://localhost:11434", - "id": "chatOllama_0-input-baseUrl-string" - }, - { - "label": "Model Name", - "name": "modelName", - "type": "string", - "placeholder": "llama2", - "id": "chatOllama_0-input-modelName-string" - }, - { - "label": "Temperature", - "name": "temperature", - "type": "number", - "description": "The temperature of the model. 
Increasing the temperature will make the model answer more creatively. (Default: 0.8). Refer to docs for more details", - "step": 0.1, - "default": 0.9, - "optional": true, - "id": "chatOllama_0-input-temperature-number" - }, - { - "label": "Top P", - "name": "topP", - "type": "number", - "description": "Works together with top-k. A higher value (e.g., 0.95) will lead to more diverse text, while a lower value (e.g., 0.5) will generate more focused and conservative text. (Default: 0.9). Refer to docs for more details", - "step": 0.1, - "optional": true, - "additionalParams": true, - "id": "chatOllama_0-input-topP-number" - }, - { - "label": "Top K", - "name": "topK", - "type": "number", - "description": "Reduces the probability of generating nonsense. A higher value (e.g. 100) will give more diverse answers, while a lower value (e.g. 10) will be more conservative. (Default: 40). Refer to docs for more details", - "step": 1, - "optional": true, - "additionalParams": true, - "id": "chatOllama_0-input-topK-number" - }, - { - "label": "Mirostat", - "name": "mirostat", - "type": "number", - "description": "Enable Mirostat sampling for controlling perplexity. (default: 0, 0 = disabled, 1 = Mirostat, 2 = Mirostat 2.0). Refer to docs for more details", - "step": 1, - "optional": true, - "additionalParams": true, - "id": "chatOllama_0-input-mirostat-number" - }, - { - "label": "Mirostat ETA", - "name": "mirostatEta", - "type": "number", - "description": "Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive. 
(Default: 0.1) Refer to docs for more details", - "step": 0.1, - "optional": true, - "additionalParams": true, - "id": "chatOllama_0-input-mirostatEta-number" - }, - { - "label": "Mirostat TAU", - "name": "mirostatTau", - "type": "number", - "description": "Controls the balance between coherence and diversity of the output. A lower value will result in more focused and coherent text. (Default: 5.0) Refer to docs for more details", - "step": 0.1, - "optional": true, - "additionalParams": true, - "id": "chatOllama_0-input-mirostatTau-number" - }, - { - "label": "Context Window Size", - "name": "numCtx", - "type": "number", - "description": "Sets the size of the context window used to generate the next token. (Default: 2048) Refer to docs for more details", - "step": 1, - "optional": true, - "additionalParams": true, - "id": "chatOllama_0-input-numCtx-number" - }, - { - "label": "Number of GQA groups", - "name": "numGqa", - "type": "number", - "description": "The number of GQA groups in the transformer layer. Required for some models, for example it is 8 for llama2:70b. Refer to docs for more details", - "step": 1, - "optional": true, - "additionalParams": true, - "id": "chatOllama_0-input-numGqa-number" - }, - { - "label": "Number of GPU", - "name": "numGpu", - "type": "number", - "description": "The number of layers to send to the GPU(s). On macOS it defaults to 1 to enable metal support, 0 to disable. Refer to docs for more details", - "step": 1, - "optional": true, - "additionalParams": true, - "id": "chatOllama_0-input-numGpu-number" - }, - { - "label": "Number of Thread", - "name": "numThread", - "type": "number", - "description": "Sets the number of threads to use during computation. By default, Ollama will detect this for optimal performance. It is recommended to set this value to the number of physical CPU cores your system has (as opposed to the logical number of cores). 
Refer to docs for more details", - "step": 1, - "optional": true, - "additionalParams": true, - "id": "chatOllama_0-input-numThread-number" - }, - { - "label": "Repeat Last N", - "name": "repeatLastN", - "type": "number", - "description": "Sets how far back for the model to look back to prevent repetition. (Default: 64, 0 = disabled, -1 = num_ctx). Refer to docs for more details", - "step": 1, - "optional": true, - "additionalParams": true, - "id": "chatOllama_0-input-repeatLastN-number" - }, - { - "label": "Repeat Penalty", - "name": "repeatPenalty", - "type": "number", - "description": "Sets how strongly to penalize repetitions. A higher value (e.g., 1.5) will penalize repetitions more strongly, while a lower value (e.g., 0.9) will be more lenient. (Default: 1.1). Refer to docs for more details", - "step": 0.1, - "optional": true, - "additionalParams": true, - "id": "chatOllama_0-input-repeatPenalty-number" - }, - { - "label": "Stop Sequence", - "name": "stop", - "type": "string", - "rows": 4, - "placeholder": "AI assistant:", - "description": "Sets the stop sequences to use. Use comma to seperate different sequences. Refer to docs for more details", - "optional": true, - "additionalParams": true, - "id": "chatOllama_0-input-stop-string" - }, - { - "label": "Tail Free Sampling", - "name": "tfsZ", - "type": "number", - "description": "Tail free sampling is used to reduce the impact of less probable tokens from the output. A higher value (e.g., 2.0) will reduce the impact more, while a value of 1.0 disables this setting. (Default: 1). 
Refer to docs for more details", - "step": 0.1, - "optional": true, - "additionalParams": true, - "id": "chatOllama_0-input-tfsZ-number" - } - ], - "inputAnchors": [ - { - "label": "Cache", - "name": "cache", - "type": "BaseCache", - "optional": true, - "id": "chatOllama_0-input-cache-BaseCache" - } - ], - "inputs": { - "cache": "", - "baseUrl": "http://localhost:11434", - "modelName": "llama2", - "temperature": 0.9, - "topP": "", - "topK": "", - "mirostat": "", - "mirostatEta": "", - "mirostatTau": "", - "numCtx": "", - "numGqa": "", - "numGpu": "", - "numThread": "", - "repeatLastN": "", - "repeatPenalty": "", - "stop": "", - "tfsZ": "" - }, - "outputAnchors": [ - { - "id": "chatOllama_0-output-chatOllama-ChatOllama|SimpleChatModel|BaseChatModel|BaseLanguageModel|Runnable", - "name": "chatOllama", - "label": "ChatOllama", - "description": "Chat completion using open-source LLM on Ollama", - "type": "ChatOllama | SimpleChatModel | BaseChatModel | BaseLanguageModel | Runnable" - } - ], - "outputs": {}, - "selected": false - }, - "width": 300, - "height": 580, - "selected": false, - "positionAbsolute": { - "x": 1662.4375746412504, - "y": 114.83248283616422 - }, - "dragging": false - }, - { - "id": "stickyNote_0", - "position": { - "x": 2421.3310049814813, - "y": -395.88989972468414 - }, - "type": "stickyNote", - "data": { - "id": "stickyNote_0", - "label": "Sticky Note", - "version": 2, - "name": "stickyNote", - "type": "StickyNote", - "baseClasses": ["StickyNote"], - "tags": ["Utilities"], - "category": "Utilities", - "description": "Add a sticky note", - "inputParams": [ - { - "label": "", - "name": "note", - "type": "string", - "rows": 1, - "placeholder": "Type something here", - "optional": true, - "id": "stickyNote_0-input-note-string" - } - ], - "inputAnchors": [], - "inputs": { - "note": "Conversational Agent is suitable for LLM which doesn't have function calling support.\n\nIt uses the prompt to decide which Chain Tool is appropriate to answer user 
question. Downside is there could be higher error rate due to hallucination.\n\nOtherwise, it is recommended to use Multiple Documents QnA template which uses Tool Agent" - }, - "outputAnchors": [ - { - "id": "stickyNote_0-output-stickyNote-StickyNote", - "name": "stickyNote", - "label": "StickyNote", - "description": "Add a sticky note", - "type": "StickyNote" - } - ], - "outputs": {}, - "selected": false - }, - "width": 300, - "height": 264, - "selected": false, - "positionAbsolute": { - "x": 2421.3310049814813, - "y": -395.88989972468414 - }, - "dragging": false - } - ], - "edges": [ - { - "source": "retrievalQAChain_0", - "sourceHandle": "retrievalQAChain_0-output-retrievalQAChain-RetrievalQAChain|BaseChain|BaseLangChain", - "target": "chainTool_2", - "targetHandle": "chainTool_2-input-baseChain-BaseChain", - "type": "buttonedge", - "id": "retrievalQAChain_0-retrievalQAChain_0-output-retrievalQAChain-RetrievalQAChain|BaseChain|BaseLangChain-chainTool_2-chainTool_2-input-baseChain-BaseChain", - "data": { - "label": "" - } - }, - { - "source": "retrievalQAChain_1", - "sourceHandle": "retrievalQAChain_1-output-retrievalQAChain-RetrievalQAChain|BaseChain|BaseLangChain", - "target": "chainTool_3", - "targetHandle": "chainTool_3-input-baseChain-BaseChain", - "type": "buttonedge", - "id": "retrievalQAChain_1-retrievalQAChain_1-output-retrievalQAChain-RetrievalQAChain|BaseChain|BaseLangChain-chainTool_3-chainTool_3-input-baseChain-BaseChain", - "data": { - "label": "" - } - }, - { - "source": "openAIEmbeddings_1", - "sourceHandle": "openAIEmbeddings_1-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", - "target": "redis_0", - "targetHandle": "redis_0-input-embeddings-Embeddings", - "type": "buttonedge", - "id": "openAIEmbeddings_1-openAIEmbeddings_1-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings-redis_0-redis_0-input-embeddings-Embeddings", - "data": { - "label": "" - } - }, - { - "source": "redis_0", - "sourceHandle": 
"redis_0-output-retriever-Redis|VectorStoreRetriever|BaseRetriever", - "target": "retrievalQAChain_0", - "targetHandle": "retrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever", - "type": "buttonedge", - "id": "redis_0-redis_0-output-retriever-Redis|VectorStoreRetriever|BaseRetriever-retrievalQAChain_0-retrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever", - "data": { - "label": "" - } - }, - { - "source": "plainText_0", - "sourceHandle": "plainText_0-output-document-Document|json", - "target": "redis_0", - "targetHandle": "redis_0-input-document-Document", - "type": "buttonedge", - "id": "plainText_0-plainText_0-output-document-Document|json-redis_0-redis_0-input-document-Document", - "data": { - "label": "" - } - }, - { - "source": "chatOpenAI_0", - "sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", - "target": "retrievalQAChain_0", - "targetHandle": "retrievalQAChain_0-input-model-BaseLanguageModel", - "type": "buttonedge", - "id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable-retrievalQAChain_0-retrievalQAChain_0-input-model-BaseLanguageModel", - "data": { - "label": "" - } - }, - { - "source": "chatOpenAI_1", - "sourceHandle": "chatOpenAI_1-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", - "target": "retrievalQAChain_1", - "targetHandle": "retrievalQAChain_1-input-model-BaseLanguageModel", - "type": "buttonedge", - "id": "chatOpenAI_1-chatOpenAI_1-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable-retrievalQAChain_1-retrievalQAChain_1-input-model-BaseLanguageModel", - "data": { - "label": "" - } - }, - { - "source": "faiss_0", - "sourceHandle": "faiss_0-output-retriever-Faiss|VectorStoreRetriever|BaseRetriever", - "target": "retrievalQAChain_1", - "targetHandle": "retrievalQAChain_1-input-vectorStoreRetriever-BaseRetriever", - "type": "buttonedge", - "id": 
"faiss_0-faiss_0-output-retriever-Faiss|VectorStoreRetriever|BaseRetriever-retrievalQAChain_1-retrievalQAChain_1-input-vectorStoreRetriever-BaseRetriever", - "data": { - "label": "" - } - }, - { - "source": "openAIEmbeddings_2", - "sourceHandle": "openAIEmbeddings_2-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", - "target": "faiss_0", - "targetHandle": "faiss_0-input-embeddings-Embeddings", - "type": "buttonedge", - "id": "openAIEmbeddings_2-openAIEmbeddings_2-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings-faiss_0-faiss_0-input-embeddings-Embeddings", - "data": { - "label": "" - } - }, - { - "source": "plainText_1", - "sourceHandle": "plainText_1-output-document-Document|json", - "target": "faiss_0", - "targetHandle": "faiss_0-input-document-Document", - "type": "buttonedge", - "id": "plainText_1-plainText_1-output-document-Document|json-faiss_0-faiss_0-input-document-Document", - "data": { - "label": "" - } - }, - { - "source": "recursiveCharacterTextSplitter_0", - "sourceHandle": "recursiveCharacterTextSplitter_0-output-recursiveCharacterTextSplitter-RecursiveCharacterTextSplitter|TextSplitter|BaseDocumentTransformer|Runnable", - "target": "plainText_1", - "targetHandle": "plainText_1-input-textSplitter-TextSplitter", - "type": "buttonedge", - "id": "recursiveCharacterTextSplitter_0-recursiveCharacterTextSplitter_0-output-recursiveCharacterTextSplitter-RecursiveCharacterTextSplitter|TextSplitter|BaseDocumentTransformer|Runnable-plainText_1-plainText_1-input-textSplitter-TextSplitter", - "data": { - "label": "" - } - }, - { - "source": "chainTool_2", - "sourceHandle": "chainTool_2-output-chainTool-ChainTool|DynamicTool|Tool|StructuredTool|BaseLangChain", - "target": "conversationalAgent_0", - "targetHandle": "conversationalAgent_0-input-tools-Tool", - "type": "buttonedge", - "id": "chainTool_2-chainTool_2-output-chainTool-ChainTool|DynamicTool|Tool|StructuredTool|BaseLangChain-conversationalAgent_0-conversationalAgent_0-input-tools-Tool", - "data": { - 
"label": "" - } - }, - { - "source": "chainTool_3", - "sourceHandle": "chainTool_3-output-chainTool-ChainTool|DynamicTool|Tool|StructuredTool|BaseLangChain", - "target": "conversationalAgent_0", - "targetHandle": "conversationalAgent_0-input-tools-Tool", - "type": "buttonedge", - "id": "chainTool_3-chainTool_3-output-chainTool-ChainTool|DynamicTool|Tool|StructuredTool|BaseLangChain-conversationalAgent_0-conversationalAgent_0-input-tools-Tool", - "data": { - "label": "" - } - }, - { - "source": "bufferMemory_0", - "sourceHandle": "bufferMemory_0-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory", - "target": "conversationalAgent_0", - "targetHandle": "conversationalAgent_0-input-memory-BaseChatMemory", - "type": "buttonedge", - "id": "bufferMemory_0-bufferMemory_0-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory-conversationalAgent_0-conversationalAgent_0-input-memory-BaseChatMemory", - "data": { - "label": "" - } - }, - { - "source": "chatOllama_0", - "sourceHandle": "chatOllama_0-output-chatOllama-ChatOllama|SimpleChatModel|BaseChatModel|BaseLanguageModel|Runnable", - "target": "conversationalAgent_0", - "targetHandle": "conversationalAgent_0-input-model-BaseChatModel", - "type": "buttonedge", - "id": "chatOllama_0-chatOllama_0-output-chatOllama-ChatOllama|SimpleChatModel|BaseChatModel|BaseLanguageModel|Runnable-conversationalAgent_0-conversationalAgent_0-input-model-BaseChatModel" - } - ] -} diff --git a/packages/server/marketplaces/chatflows/API Agent.json b/packages/server/marketplaces/chatflows/OpenAPI YAML Agent.json similarity index 99% rename from packages/server/marketplaces/chatflows/API Agent.json rename to packages/server/marketplaces/chatflows/OpenAPI YAML Agent.json index 5d3084bec..5e958da7d 100644 --- a/packages/server/marketplaces/chatflows/API Agent.json +++ b/packages/server/marketplaces/chatflows/OpenAPI YAML Agent.json @@ -1,5 +1,5 @@ { - "description": "Given API docs, agent automatically decide which API to call, generating 
url and body request from conversation", + "description": "Given an OpenAPI YAML file, agent automatically decide which API to call, generating url and body request from conversation", "framework": ["Langchain"], "usecases": ["Interacting with API"], "nodes": [ diff --git a/packages/server/marketplaces/chatflows/Prompt Chaining with VectorStore.json b/packages/server/marketplaces/chatflows/Prompt Chaining with VectorStore.json deleted file mode 100644 index 881106fce..000000000 --- a/packages/server/marketplaces/chatflows/Prompt Chaining with VectorStore.json +++ /dev/null @@ -1,1277 +0,0 @@ -{ - "description": "Use chat history to rephrase user question, then answer the rephrased question using retrieved docs from vector store", - "usecases": ["Documents QnA"], - "framework": ["Langchain"], - "nodes": [ - { - "width": 300, - "height": 511, - "id": "promptTemplate_0", - "position": { - "x": 344.73370692733414, - "y": -122.34815000085804 - }, - "type": "customNode", - "data": { - "id": "promptTemplate_0", - "label": "Prompt Template", - "version": 1, - "name": "promptTemplate", - "type": "PromptTemplate", - "baseClasses": ["PromptTemplate", "BaseStringPromptTemplate", "BasePromptTemplate", "Runnable"], - "category": "Prompts", - "description": "Schema to represent a basic prompt for an LLM", - "inputParams": [ - { - "label": "Template", - "name": "template", - "type": "string", - "rows": 4, - "placeholder": "What is a good name for a company that makes {product}?", - "id": "promptTemplate_0-input-template-string" - }, - { - "label": "Format Prompt Values", - "name": "promptValues", - "type": "json", - "optional": true, - "acceptVariable": true, - "list": true, - "id": "promptTemplate_0-input-promptValues-json" - } - ], - "inputAnchors": [], - "inputs": { - "template": "Given the following conversation and a follow up question, rephrase the follow up question to be a standalone question.\n\nChat History:\n{chat_history}\nFollow Up Input: {question}\nStandalone 
question:", - "promptValues": "{\"question\":\"{{question}}\",\"chat_history\":\"{{chat_history}}\"}" - }, - "outputAnchors": [ - { - "id": "promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate|Runnable", - "name": "promptTemplate", - "label": "PromptTemplate", - "type": "PromptTemplate | BaseStringPromptTemplate | BasePromptTemplate | Runnable" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 344.73370692733414, - "y": -122.34815000085804 - }, - "dragging": false - }, - { - "width": 300, - "height": 688, - "id": "chatPromptTemplate_0", - "position": { - "x": 2314.8876045231254, - "y": -163.68061503572068 - }, - "type": "customNode", - "data": { - "id": "chatPromptTemplate_0", - "label": "Chat Prompt Template", - "version": 1, - "name": "chatPromptTemplate", - "type": "ChatPromptTemplate", - "baseClasses": ["ChatPromptTemplate", "BaseChatPromptTemplate", "BasePromptTemplate", "Runnable"], - "category": "Prompts", - "description": "Schema to represent a chat prompt", - "inputParams": [ - { - "label": "System Message", - "name": "systemMessagePrompt", - "type": "string", - "rows": 4, - "placeholder": "You are a helpful assistant that translates {input_language} to {output_language}.", - "id": "chatPromptTemplate_0-input-systemMessagePrompt-string" - }, - { - "label": "Human Message", - "name": "humanMessagePrompt", - "type": "string", - "rows": 4, - "placeholder": "{text}", - "id": "chatPromptTemplate_0-input-humanMessagePrompt-string" - }, - { - "label": "Format Prompt Values", - "name": "promptValues", - "type": "json", - "optional": true, - "acceptVariable": true, - "list": true, - "id": "chatPromptTemplate_0-input-promptValues-json" - } - ], - "inputAnchors": [], - "inputs": { - "systemMessagePrompt": "Using the provided context, answer the user's question to the best of your ability using the resources provided. 
If there is nothing in the context relevant to the question at hand, just say \"Hmm, I'm not sure.\" Don't try to make up an answer.\n\nAnything between the following \\`context\\` html blocks is retrieved from a knowledge bank, not part of the conversation with the user.\n\n\n {context}\n\n\nREMEMBER: If there is no relevant information within the context, just say \"Hmm, I'm not sure.\" Don't try to make up an answer. Anything between the preceding 'context' html blocks is retrieved from a knowledge bank, not part of the conversation with the user.", - "humanMessagePrompt": "{text}", - "promptValues": "{\"context\":\"{{vectorStoreToDocument_0.data.instance}}\",\"text\":\"{{question}}\"}" - }, - "outputAnchors": [ - { - "id": "chatPromptTemplate_0-output-chatPromptTemplate-ChatPromptTemplate|BaseChatPromptTemplate|BasePromptTemplate|Runnable", - "name": "chatPromptTemplate", - "label": "ChatPromptTemplate", - "type": "ChatPromptTemplate | BaseChatPromptTemplate | BasePromptTemplate | Runnable" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 2314.8876045231254, - "y": -163.68061503572068 - }, - "dragging": false - }, - { - "width": 300, - "height": 454, - "id": "vectorStoreToDocument_0", - "position": { - "x": 1906.6871314089658, - "y": -157.0046189166955 - }, - "type": "customNode", - "data": { - "id": "vectorStoreToDocument_0", - "label": "VectorStore To Document", - "version": 2, - "name": "vectorStoreToDocument", - "type": "Document", - "baseClasses": ["Document"], - "category": "Document Loaders", - "description": "Search documents with scores from vector store", - "inputParams": [ - { - "label": "Query", - "name": "query", - "type": "string", - "description": "Query to retrieve documents from vector database. 
If not specified, user question will be used", - "optional": true, - "acceptVariable": true, - "id": "vectorStoreToDocument_0-input-query-string" - }, - { - "label": "Minimum Score (%)", - "name": "minScore", - "type": "number", - "optional": true, - "placeholder": "75", - "step": 1, - "description": "Minumum score for embeddings documents to be included", - "id": "vectorStoreToDocument_0-input-minScore-number" - } - ], - "inputAnchors": [ - { - "label": "Vector Store", - "name": "vectorStore", - "type": "VectorStore", - "id": "vectorStoreToDocument_0-input-vectorStore-VectorStore" - } - ], - "inputs": { - "vectorStore": "{{singlestore_0.data.instance}}", - "query": "{{llmChain_2.data.instance}}", - "minScore": "" - }, - "outputAnchors": [ - { - "name": "output", - "label": "Output", - "type": "options", - "options": [ - { - "id": "vectorStoreToDocument_0-output-document-Document|json", - "name": "document", - "label": "Document", - "type": "Document | json" - }, - { - "id": "vectorStoreToDocument_0-output-text-string|json", - "name": "text", - "label": "Text", - "type": "string | json" - } - ], - "default": "document" - } - ], - "outputs": { - "output": "text" - }, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 1906.6871314089658, - "y": -157.0046189166955 - }, - "dragging": false - }, - { - "width": 300, - "height": 507, - "id": "llmChain_2", - "position": { - "x": 756.2678342825631, - "y": -244.07972550448233 - }, - "type": "customNode", - "data": { - "id": "llmChain_2", - "label": "LLM Chain", - "version": 3, - "name": "llmChain", - "type": "LLMChain", - "baseClasses": ["LLMChain", "BaseChain", "Runnable"], - "category": "Chains", - "description": "Chain to run queries against LLMs", - "inputParams": [ - { - "label": "Chain Name", - "name": "chainName", - "type": "string", - "placeholder": "Name Your Chain", - "optional": true, - "id": "llmChain_2-input-chainName-string" - } - ], - "inputAnchors": [ - { - "label": "Language Model", 
- "name": "model", - "type": "BaseLanguageModel", - "id": "llmChain_2-input-model-BaseLanguageModel" - }, - { - "label": "Prompt", - "name": "prompt", - "type": "BasePromptTemplate", - "id": "llmChain_2-input-prompt-BasePromptTemplate" - }, - { - "label": "Output Parser", - "name": "outputParser", - "type": "BaseLLMOutputParser", - "optional": true, - "id": "llmChain_2-input-outputParser-BaseLLMOutputParser" - }, - { - "label": "Input Moderation", - "description": "Detect text that could generate harmful output and prevent it from being sent to the language model", - "name": "inputModeration", - "type": "Moderation", - "optional": true, - "list": true, - "id": "llmChain_2-input-inputModeration-Moderation" - } - ], - "inputs": { - "model": "{{chatOpenAI_0.data.instance}}", - "prompt": "{{promptTemplate_0.data.instance}}", - "outputParser": "", - "chainName": "RephraseQuestion", - "inputModeration": "" - }, - "outputAnchors": [ - { - "name": "output", - "label": "Output", - "type": "options", - "options": [ - { - "id": "llmChain_2-output-llmChain-LLMChain|BaseChain|Runnable", - "name": "llmChain", - "label": "LLM Chain", - "type": "LLMChain | BaseChain | Runnable" - }, - { - "id": "llmChain_2-output-outputPrediction-string|json", - "name": "outputPrediction", - "label": "Output Prediction", - "type": "string | json" - } - ], - "default": "llmChain" - } - ], - "outputs": { - "output": "outputPrediction" - }, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 756.2678342825631, - "y": -244.07972550448233 - }, - "dragging": false - }, - { - "width": 300, - "height": 507, - "id": "llmChain_1", - "position": { - "x": 2716.1571046184436, - "y": -279.02657697343375 - }, - "type": "customNode", - "data": { - "id": "llmChain_1", - "label": "LLM Chain", - "version": 3, - "name": "llmChain", - "type": "LLMChain", - "baseClasses": ["LLMChain", "BaseChain", "Runnable"], - "category": "Chains", - "description": "Chain to run queries against LLMs", - 
"inputParams": [ - { - "label": "Chain Name", - "name": "chainName", - "type": "string", - "placeholder": "Name Your Chain", - "optional": true, - "id": "llmChain_1-input-chainName-string" - } - ], - "inputAnchors": [ - { - "label": "Language Model", - "name": "model", - "type": "BaseLanguageModel", - "id": "llmChain_1-input-model-BaseLanguageModel" - }, - { - "label": "Prompt", - "name": "prompt", - "type": "BasePromptTemplate", - "id": "llmChain_1-input-prompt-BasePromptTemplate" - }, - { - "label": "Output Parser", - "name": "outputParser", - "type": "BaseLLMOutputParser", - "optional": true, - "id": "llmChain_1-input-outputParser-BaseLLMOutputParser" - }, - { - "label": "Input Moderation", - "description": "Detect text that could generate harmful output and prevent it from being sent to the language model", - "name": "inputModeration", - "type": "Moderation", - "optional": true, - "list": true, - "id": "llmChain_1-input-inputModeration-Moderation" - } - ], - "inputs": { - "model": "{{chatOpenAI_1.data.instance}}", - "prompt": "{{chatPromptTemplate_0.data.instance}}", - "outputParser": "", - "chainName": "FinalResponse", - "inputModeration": "" - }, - "outputAnchors": [ - { - "name": "output", - "label": "Output", - "type": "options", - "options": [ - { - "id": "llmChain_1-output-llmChain-LLMChain|BaseChain|Runnable", - "name": "llmChain", - "label": "LLM Chain", - "type": "LLMChain | BaseChain | Runnable" - }, - { - "id": "llmChain_1-output-outputPrediction-string|json", - "name": "outputPrediction", - "label": "Output Prediction", - "type": "string | json" - } - ], - "default": "llmChain" - } - ], - "outputs": { - "output": "llmChain" - }, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 2716.1571046184436, - "y": -279.02657697343375 - }, - "dragging": false - }, - { - "width": 300, - "height": 669, - "id": "chatOpenAI_0", - "position": { - "x": 344.77878441903204, - "y": -832.2188929689953 - }, - "type": "customNode", - "data": { - 
"id": "chatOpenAI_0", - "label": "ChatOpenAI", - "version": 6, - "name": "chatOpenAI", - "type": "ChatOpenAI", - "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel", "Runnable"], - "category": "Chat Models", - "description": "Wrapper around OpenAI large language models that use the Chat endpoint", - "inputParams": [ - { - "label": "Connect Credential", - "name": "credential", - "type": "credential", - "credentialNames": ["openAIApi"], - "id": "chatOpenAI_0-input-credential-credential" - }, - { - "label": "Model Name", - "name": "modelName", - "type": "asyncOptions", - "loadMethod": "listModels", - "default": "gpt-3.5-turbo", - "id": "chatOpenAI_0-input-modelName-options" - }, - { - "label": "Temperature", - "name": "temperature", - "type": "number", - "step": 0.1, - "default": 0.9, - "optional": true, - "id": "chatOpenAI_0-input-temperature-number" - }, - { - "label": "Max Tokens", - "name": "maxTokens", - "type": "number", - "step": 1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-maxTokens-number" - }, - { - "label": "Top Probability", - "name": "topP", - "type": "number", - "step": 0.1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-topP-number" - }, - { - "label": "Frequency Penalty", - "name": "frequencyPenalty", - "type": "number", - "step": 0.1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-frequencyPenalty-number" - }, - { - "label": "Presence Penalty", - "name": "presencePenalty", - "type": "number", - "step": 0.1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-presencePenalty-number" - }, - { - "label": "Timeout", - "name": "timeout", - "type": "number", - "step": 1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-timeout-number" - }, - { - "label": "BasePath", - "name": "basepath", - "type": "string", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-basepath-string" - 
}, - { - "label": "BaseOptions", - "name": "baseOptions", - "type": "json", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-baseOptions-json" - }, - { - "label": "Allow Image Uploads", - "name": "allowImageUploads", - "type": "boolean", - "description": "Automatically uses gpt-4-vision-preview when image is being uploaded from chat. Only works with LLMChain, Conversation Chain, ReAct Agent, and Conversational Agent", - "default": false, - "optional": true, - "id": "chatOpenAI_0-input-allowImageUploads-boolean" - }, - { - "label": "Image Resolution", - "description": "This parameter controls the resolution in which the model views the image.", - "name": "imageResolution", - "type": "options", - "options": [ - { - "label": "Low", - "name": "low" - }, - { - "label": "High", - "name": "high" - }, - { - "label": "Auto", - "name": "auto" - } - ], - "default": "low", - "optional": false, - "additionalParams": true, - "id": "chatOpenAI_0-input-imageResolution-options" - } - ], - "inputAnchors": [ - { - "label": "Cache", - "name": "cache", - "type": "BaseCache", - "optional": true, - "id": "chatOpenAI_0-input-cache-BaseCache" - } - ], - "inputs": { - "cache": "", - "modelName": "gpt-3.5-turbo-16k", - "temperature": "0", - "maxTokens": "", - "topP": "", - "frequencyPenalty": "", - "presencePenalty": "", - "timeout": "", - "basepath": "", - "baseOptions": "", - "allowImageUploads": true, - "imageResolution": "low" - }, - "outputAnchors": [ - { - "id": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", - "name": "chatOpenAI", - "label": "ChatOpenAI", - "type": "ChatOpenAI | BaseChatModel | BaseLanguageModel | Runnable" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 344.77878441903204, - "y": -832.2188929689953 - }, - "dragging": false - }, - { - "width": 300, - "height": 669, - "id": "chatOpenAI_1", - "position": { - "x": 2296.3207911691625, - "y": 
-880.514745028577 - }, - "type": "customNode", - "data": { - "id": "chatOpenAI_1", - "label": "ChatOpenAI", - "version": 6, - "name": "chatOpenAI", - "type": "ChatOpenAI", - "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel", "Runnable"], - "category": "Chat Models", - "description": "Wrapper around OpenAI large language models that use the Chat endpoint", - "inputParams": [ - { - "label": "Connect Credential", - "name": "credential", - "type": "credential", - "credentialNames": ["openAIApi"], - "id": "chatOpenAI_1-input-credential-credential" - }, - { - "label": "Model Name", - "name": "modelName", - "type": "asyncOptions", - "loadMethod": "listModels", - "default": "gpt-3.5-turbo", - "id": "chatOpenAI_1-input-modelName-options" - }, - { - "label": "Temperature", - "name": "temperature", - "type": "number", - "step": 0.1, - "default": 0.9, - "optional": true, - "id": "chatOpenAI_1-input-temperature-number" - }, - { - "label": "Max Tokens", - "name": "maxTokens", - "type": "number", - "step": 1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_1-input-maxTokens-number" - }, - { - "label": "Top Probability", - "name": "topP", - "type": "number", - "step": 0.1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_1-input-topP-number" - }, - { - "label": "Frequency Penalty", - "name": "frequencyPenalty", - "type": "number", - "step": 0.1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_1-input-frequencyPenalty-number" - }, - { - "label": "Presence Penalty", - "name": "presencePenalty", - "type": "number", - "step": 0.1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_1-input-presencePenalty-number" - }, - { - "label": "Timeout", - "name": "timeout", - "type": "number", - "step": 1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_1-input-timeout-number" - }, - { - "label": "BasePath", - "name": "basepath", - "type": "string", - "optional": true, - 
"additionalParams": true, - "id": "chatOpenAI_1-input-basepath-string" - }, - { - "label": "BaseOptions", - "name": "baseOptions", - "type": "json", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_1-input-baseOptions-json" - }, - { - "label": "Allow Image Uploads", - "name": "allowImageUploads", - "type": "boolean", - "description": "Automatically uses gpt-4-vision-preview when image is being uploaded from chat. Only works with LLMChain, Conversation Chain, ReAct Agent, and Conversational Agent", - "default": false, - "optional": true, - "id": "chatOpenAI_1-input-allowImageUploads-boolean" - }, - { - "label": "Image Resolution", - "description": "This parameter controls the resolution in which the model views the image.", - "name": "imageResolution", - "type": "options", - "options": [ - { - "label": "Low", - "name": "low" - }, - { - "label": "High", - "name": "high" - }, - { - "label": "Auto", - "name": "auto" - } - ], - "default": "low", - "optional": false, - "additionalParams": true, - "id": "chatOpenAI_1-input-imageResolution-options" - } - ], - "inputAnchors": [ - { - "label": "Cache", - "name": "cache", - "type": "BaseCache", - "optional": true, - "id": "chatOpenAI_1-input-cache-BaseCache" - } - ], - "inputs": { - "cache": "", - "modelName": "gpt-3.5-turbo-16k", - "temperature": "0", - "maxTokens": "", - "topP": "", - "frequencyPenalty": "", - "presencePenalty": "", - "timeout": "", - "basepath": "", - "baseOptions": "", - "allowImageUploads": true, - "imageResolution": "low" - }, - "outputAnchors": [ - { - "id": "chatOpenAI_1-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", - "name": "chatOpenAI", - "label": "ChatOpenAI", - "type": "ChatOpenAI | BaseChatModel | BaseLanguageModel | Runnable" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 2296.3207911691625, - "y": -880.514745028577 - }, - "dragging": false - }, - { - "width": 300, - "height": 652, - "id": 
"singlestore_0", - "position": { - "x": 1530.532503048084, - "y": -657.3586990397077 - }, - "type": "customNode", - "data": { - "id": "singlestore_0", - "label": "SingleStore", - "version": 1, - "name": "singlestore", - "type": "SingleStore", - "baseClasses": ["SingleStore", "VectorStoreRetriever", "BaseRetriever"], - "category": "Vector Stores", - "description": "Upsert embedded data and perform similarity search upon query using SingleStore, a fast and distributed cloud relational database", - "inputParams": [ - { - "label": "Connect Credential", - "name": "credential", - "type": "credential", - "description": "Needed when using SingleStore cloud hosted", - "optional": true, - "credentialNames": ["singleStoreApi"], - "id": "singlestore_0-input-credential-credential" - }, - { - "label": "Host", - "name": "host", - "type": "string", - "id": "singlestore_0-input-host-string" - }, - { - "label": "Database", - "name": "database", - "type": "string", - "id": "singlestore_0-input-database-string" - }, - { - "label": "Table Name", - "name": "tableName", - "type": "string", - "placeholder": "embeddings", - "additionalParams": true, - "optional": true, - "id": "singlestore_0-input-tableName-string" - }, - { - "label": "Content Column Name", - "name": "contentColumnName", - "type": "string", - "placeholder": "content", - "additionalParams": true, - "optional": true, - "id": "singlestore_0-input-contentColumnName-string" - }, - { - "label": "Vector Column Name", - "name": "vectorColumnName", - "type": "string", - "placeholder": "vector", - "additionalParams": true, - "optional": true, - "id": "singlestore_0-input-vectorColumnName-string" - }, - { - "label": "Metadata Column Name", - "name": "metadataColumnName", - "type": "string", - "placeholder": "metadata", - "additionalParams": true, - "optional": true, - "id": "singlestore_0-input-metadataColumnName-string" - }, - { - "label": "Top K", - "name": "topK", - "placeholder": "4", - "type": "number", - "additionalParams": 
true, - "optional": true, - "id": "singlestore_0-input-topK-number" - } - ], - "inputAnchors": [ - { - "label": "Document", - "name": "document", - "type": "Document", - "list": true, - "optional": true, - "id": "singlestore_0-input-document-Document" - }, - { - "label": "Embeddings", - "name": "embeddings", - "type": "Embeddings", - "id": "singlestore_0-input-embeddings-Embeddings" - } - ], - "inputs": { - "document": "", - "embeddings": "{{openAIEmbeddings_0.data.instance}}", - "host": "", - "database": "", - "tableName": "", - "contentColumnName": "", - "vectorColumnName": "", - "metadataColumnName": "", - "topK": "" - }, - "outputAnchors": [ - { - "name": "output", - "label": "Output", - "type": "options", - "options": [ - { - "id": "singlestore_0-output-retriever-SingleStore|VectorStoreRetriever|BaseRetriever", - "name": "retriever", - "label": "SingleStore Retriever", - "type": "SingleStore | VectorStoreRetriever | BaseRetriever" - }, - { - "id": "singlestore_0-output-vectorStore-SingleStore|VectorStore", - "name": "vectorStore", - "label": "SingleStore Vector Store", - "type": "SingleStore | VectorStore" - } - ], - "default": "retriever" - } - ], - "outputs": { - "output": "vectorStore" - }, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 1530.532503048084, - "y": -657.3586990397077 - }, - "dragging": false - }, - { - "width": 300, - "height": 423, - "id": "openAIEmbeddings_0", - "position": { - "x": 1154.293946350955, - "y": -589.6072684085893 - }, - "type": "customNode", - "data": { - "id": "openAIEmbeddings_0", - "label": "OpenAI Embeddings", - "version": 4, - "name": "openAIEmbeddings", - "type": "OpenAIEmbeddings", - "baseClasses": ["OpenAIEmbeddings", "Embeddings"], - "category": "Embeddings", - "description": "OpenAI API to generate embeddings for a given text", - "inputParams": [ - { - "label": "Connect Credential", - "name": "credential", - "type": "credential", - "credentialNames": ["openAIApi"], - "id": 
"openAIEmbeddings_0-input-credential-credential" - }, - { - "label": "Model Name", - "name": "modelName", - "type": "asyncOptions", - "loadMethod": "listModels", - "default": "text-embedding-ada-002", - "id": "openAIEmbeddings_0-input-modelName-asyncOptions" - }, - { - "label": "Strip New Lines", - "name": "stripNewLines", - "type": "boolean", - "optional": true, - "additionalParams": true, - "id": "openAIEmbeddings_0-input-stripNewLines-boolean" - }, - { - "label": "Batch Size", - "name": "batchSize", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "openAIEmbeddings_0-input-batchSize-number" - }, - { - "label": "Timeout", - "name": "timeout", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "openAIEmbeddings_0-input-timeout-number" - }, - { - "label": "BasePath", - "name": "basepath", - "type": "string", - "optional": true, - "additionalParams": true, - "id": "openAIEmbeddings_0-input-basepath-string" - }, - { - "label": "Dimensions", - "name": "dimensions", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "openAIEmbeddings_0-input-dimensions-number" - } - ], - "inputAnchors": [], - "inputs": { - "modelName": "text-embedding-ada-002", - "stripNewLines": "", - "batchSize": "", - "timeout": "", - "basepath": "", - "dimensions": "" - }, - "outputAnchors": [ - { - "id": "openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", - "name": "openAIEmbeddings", - "label": "OpenAIEmbeddings", - "description": "OpenAI API to generate embeddings for a given text", - "type": "OpenAIEmbeddings | Embeddings" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 1154.293946350955, - "y": -589.6072684085893 - }, - "dragging": false - }, - { - "id": "stickyNote_0", - "position": { - "x": 753.8985547694751, - "y": -597.2403700691232 - }, - "type": "stickyNote", - "data": { - "id": "stickyNote_0", - "label": "Sticky Note", - "version": 2, 
- "name": "stickyNote", - "type": "StickyNote", - "baseClasses": ["StickyNote"], - "tags": ["Utilities"], - "category": "Utilities", - "description": "Add a sticky note", - "inputParams": [ - { - "label": "", - "name": "note", - "type": "string", - "rows": 1, - "placeholder": "Type something here", - "optional": true, - "id": "stickyNote_0-input-note-string" - } - ], - "inputAnchors": [], - "inputs": { - "note": "First, we rephrase the question using context from previous conversation.\n\nThis is to ensure that a follow-up question can be asked. For example:\n\n- What is the address of the Bakery shop?\n- What about the opening time?\n\nA rephrased question will be:\n- What is the opening time of the Bakery shop?\n\nThis ensure a better search to vector store, hence better results" - }, - "outputAnchors": [ - { - "id": "stickyNote_0-output-stickyNote-StickyNote", - "name": "stickyNote", - "label": "StickyNote", - "description": "Add a sticky note", - "type": "StickyNote" - } - ], - "outputs": {}, - "selected": false - }, - "width": 300, - "height": 324, - "selected": false, - "positionAbsolute": { - "x": 753.8985547694751, - "y": -597.2403700691232 - }, - "dragging": false - }, - { - "id": "stickyNote_1", - "position": { - "x": 1904.305205441637, - "y": -241.45986503369568 - }, - "type": "stickyNote", - "data": { - "id": "stickyNote_1", - "label": "Sticky Note", - "version": 2, - "name": "stickyNote", - "type": "StickyNote", - "baseClasses": ["StickyNote"], - "tags": ["Utilities"], - "category": "Utilities", - "description": "Add a sticky note", - "inputParams": [ - { - "label": "", - "name": "note", - "type": "string", - "rows": 1, - "placeholder": "Type something here", - "optional": true, - "id": "stickyNote_1-input-note-string" - } - ], - "inputAnchors": [], - "inputs": { - "note": "Second, rephrased question is used to do a similarity search to find relevant context" - }, - "outputAnchors": [ - { - "id": "stickyNote_1-output-stickyNote-StickyNote", - "name": 
"stickyNote", - "label": "StickyNote", - "description": "Add a sticky note", - "type": "StickyNote" - } - ], - "outputs": {}, - "selected": false - }, - "width": 300, - "height": 62, - "selected": false, - "positionAbsolute": { - "x": 1904.305205441637, - "y": -241.45986503369568 - }, - "dragging": false - }, - { - "id": "stickyNote_2", - "position": { - "x": 2717.983596010546, - "y": -369.73223420234956 - }, - "type": "stickyNote", - "data": { - "id": "stickyNote_2", - "label": "Sticky Note", - "version": 2, - "name": "stickyNote", - "type": "StickyNote", - "baseClasses": ["StickyNote"], - "tags": ["Utilities"], - "category": "Utilities", - "description": "Add a sticky note", - "inputParams": [ - { - "label": "", - "name": "note", - "type": "string", - "rows": 1, - "placeholder": "Type something here", - "optional": true, - "id": "stickyNote_2-input-note-string" - } - ], - "inputAnchors": [], - "inputs": { - "note": "Last, using the context from vector store, we instruct LLM to give a final response" - }, - "outputAnchors": [ - { - "id": "stickyNote_2-output-stickyNote-StickyNote", - "name": "stickyNote", - "label": "StickyNote", - "description": "Add a sticky note", - "type": "StickyNote" - } - ], - "outputs": {}, - "selected": false - }, - "width": 300, - "height": 62, - "selected": false, - "positionAbsolute": { - "x": 2717.983596010546, - "y": -369.73223420234956 - }, - "dragging": false - } - ], - "edges": [ - { - "source": "vectorStoreToDocument_0", - "sourceHandle": "vectorStoreToDocument_0-output-text-string|json", - "target": "chatPromptTemplate_0", - "targetHandle": "chatPromptTemplate_0-input-promptValues-json", - "type": "buttonedge", - "id": "vectorStoreToDocument_0-vectorStoreToDocument_0-output-text-string|json-chatPromptTemplate_0-chatPromptTemplate_0-input-promptValues-json", - "data": { - "label": "" - } - }, - { - "source": "promptTemplate_0", - "sourceHandle": 
"promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate|Runnable", - "target": "llmChain_2", - "targetHandle": "llmChain_2-input-prompt-BasePromptTemplate", - "type": "buttonedge", - "id": "promptTemplate_0-promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate|Runnable-llmChain_2-llmChain_2-input-prompt-BasePromptTemplate", - "data": { - "label": "" - } - }, - { - "source": "llmChain_2", - "sourceHandle": "llmChain_2-output-outputPrediction-string|json", - "target": "vectorStoreToDocument_0", - "targetHandle": "vectorStoreToDocument_0-input-query-string", - "type": "buttonedge", - "id": "llmChain_2-llmChain_2-output-outputPrediction-string|json-vectorStoreToDocument_0-vectorStoreToDocument_0-input-query-string", - "data": { - "label": "" - } - }, - { - "source": "chatPromptTemplate_0", - "sourceHandle": "chatPromptTemplate_0-output-chatPromptTemplate-ChatPromptTemplate|BaseChatPromptTemplate|BasePromptTemplate|Runnable", - "target": "llmChain_1", - "targetHandle": "llmChain_1-input-prompt-BasePromptTemplate", - "type": "buttonedge", - "id": "chatPromptTemplate_0-chatPromptTemplate_0-output-chatPromptTemplate-ChatPromptTemplate|BaseChatPromptTemplate|BasePromptTemplate|Runnable-llmChain_1-llmChain_1-input-prompt-BasePromptTemplate", - "data": { - "label": "" - } - }, - { - "source": "chatOpenAI_0", - "sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", - "target": "llmChain_2", - "targetHandle": "llmChain_2-input-model-BaseLanguageModel", - "type": "buttonedge", - "id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable-llmChain_2-llmChain_2-input-model-BaseLanguageModel", - "data": { - "label": "" - } - }, - { - "source": "chatOpenAI_1", - "sourceHandle": "chatOpenAI_1-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", - "target": "llmChain_1", - "targetHandle": 
"llmChain_1-input-model-BaseLanguageModel", - "type": "buttonedge", - "id": "chatOpenAI_1-chatOpenAI_1-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable-llmChain_1-llmChain_1-input-model-BaseLanguageModel", - "data": { - "label": "" - } - }, - { - "source": "singlestore_0", - "sourceHandle": "singlestore_0-output-vectorStore-SingleStore|VectorStore", - "target": "vectorStoreToDocument_0", - "targetHandle": "vectorStoreToDocument_0-input-vectorStore-VectorStore", - "type": "buttonedge", - "id": "singlestore_0-singlestore_0-output-vectorStore-SingleStore|VectorStore-vectorStoreToDocument_0-vectorStoreToDocument_0-input-vectorStore-VectorStore", - "data": { - "label": "" - } - }, - { - "source": "openAIEmbeddings_0", - "sourceHandle": "openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", - "target": "singlestore_0", - "targetHandle": "singlestore_0-input-embeddings-Embeddings", - "type": "buttonedge", - "id": "openAIEmbeddings_0-openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings-singlestore_0-singlestore_0-input-embeddings-Embeddings", - "data": { - "label": "" - } - } - ] -} diff --git a/packages/server/marketplaces/chatflows/SQL Prompt.json b/packages/server/marketplaces/chatflows/SQL Prompt.json deleted file mode 100644 index 4e2524b4d..000000000 --- a/packages/server/marketplaces/chatflows/SQL Prompt.json +++ /dev/null @@ -1,1983 +0,0 @@ -{ - "description": "Manually construct prompts to query a SQL database", - "usecases": ["SQL"], - "framework": ["Langchain"], - "nodes": [ - { - "width": 300, - "height": 511, - "id": "promptTemplate_0", - "position": { - "x": 384.4880563109088, - "y": 253.48974179902635 - }, - "type": "customNode", - "data": { - "id": "promptTemplate_0", - "label": "Prompt Template", - "version": 1, - "name": "promptTemplate", - "type": "PromptTemplate", - "baseClasses": ["PromptTemplate", "BaseStringPromptTemplate", "BasePromptTemplate", "Runnable"], - "category": "Prompts", - 
"description": "Schema to represent a basic prompt for an LLM", - "inputParams": [ - { - "label": "Template", - "name": "template", - "type": "string", - "rows": 4, - "placeholder": "What is a good name for a company that makes {product}?", - "id": "promptTemplate_0-input-template-string" - }, - { - "label": "Format Prompt Values", - "name": "promptValues", - "type": "json", - "optional": true, - "acceptVariable": true, - "list": true, - "id": "promptTemplate_0-input-promptValues-json" - } - ], - "inputAnchors": [], - "inputs": { - "template": "You are a MySQL expert. Given an input question, create a syntactically correct MySQL query to run.\nUnless otherwise specified, do not return more than {topK} rows.\n\nHere is the relevant table info:\n{schema}\n\nBelow are a number of examples of questions and their corresponding SQL queries.\n\nUser input: List all artists.\nSQL Query: SELECT * FROM Artist;\n\nUser input: Find all albums for the artist 'AC/DC'.\nSQL Query: SELECT * FROM Album WHERE ArtistId = (SELECT ArtistId FROM Artist WHERE Name = 'AC/DC');\n\nUser input: List all tracks in the 'Rock' genre.\nSQL Query: SELECT * FROM Track WHERE GenreId = (SELECT GenreId FROM Genre WHERE Name = 'Rock');\n\nUser input: Find the total duration of all tracks.\nSQL Query: SELECT SUM(Milliseconds) FROM Track;\n\nUser input: List all customers from Canada.\nSQL Query: SELECT * FROM Customer WHERE Country = 'Canada';\n\nUser input: {question}\nSQL query:", - "promptValues": "{\"schema\":\"{{customFunction_2.data.instance}}\",\"question\":\"{{question}}\",\"topK\":3}" - }, - "outputAnchors": [ - { - "id": "promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate|Runnable", - "name": "promptTemplate", - "label": "PromptTemplate", - "type": "PromptTemplate | BaseStringPromptTemplate | BasePromptTemplate | Runnable" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 384.4880563109088, - 
"y": 253.48974179902635 - }, - "dragging": false - }, - { - "width": 300, - "height": 507, - "id": "llmChain_0", - "position": { - "x": 770.4559230968546, - "y": -127.11351409346554 - }, - "type": "customNode", - "data": { - "id": "llmChain_0", - "label": "LLM Chain", - "version": 3, - "name": "llmChain", - "type": "LLMChain", - "baseClasses": ["LLMChain", "BaseChain", "Runnable"], - "category": "Chains", - "description": "Chain to run queries against LLMs", - "inputParams": [ - { - "label": "Chain Name", - "name": "chainName", - "type": "string", - "placeholder": "Name Your Chain", - "optional": true, - "id": "llmChain_0-input-chainName-string" - } - ], - "inputAnchors": [ - { - "label": "Language Model", - "name": "model", - "type": "BaseLanguageModel", - "id": "llmChain_0-input-model-BaseLanguageModel" - }, - { - "label": "Prompt", - "name": "prompt", - "type": "BasePromptTemplate", - "id": "llmChain_0-input-prompt-BasePromptTemplate" - }, - { - "label": "Output Parser", - "name": "outputParser", - "type": "BaseLLMOutputParser", - "optional": true, - "id": "llmChain_0-input-outputParser-BaseLLMOutputParser" - }, - { - "label": "Input Moderation", - "description": "Detect text that could generate harmful output and prevent it from being sent to the language model", - "name": "inputModeration", - "type": "Moderation", - "optional": true, - "list": true, - "id": "llmChain_0-input-inputModeration-Moderation" - } - ], - "inputs": { - "model": "{{chatOpenAI_0.data.instance}}", - "prompt": "{{promptTemplate_0.data.instance}}", - "outputParser": "", - "inputModeration": "", - "chainName": "SQL Query Chain" - }, - "outputAnchors": [ - { - "name": "output", - "label": "Output", - "type": "options", - "options": [ - { - "id": "llmChain_0-output-llmChain-LLMChain|BaseChain|Runnable", - "name": "llmChain", - "label": "LLM Chain", - "type": "LLMChain | BaseChain | Runnable" - }, - { - "id": "llmChain_0-output-outputPrediction-string|json", - "name": "outputPrediction", - 
"label": "Output Prediction", - "type": "string | json" - } - ], - "default": "llmChain" - } - ], - "outputs": { - "output": "outputPrediction" - }, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 770.4559230968546, - "y": -127.11351409346554 - }, - "dragging": false - }, - { - "width": 300, - "height": 669, - "id": "chatOpenAI_0", - "position": { - "x": 376.92707114970364, - "y": -666.8088336865496 - }, - "type": "customNode", - "data": { - "id": "chatOpenAI_0", - "label": "ChatOpenAI", - "version": 6, - "name": "chatOpenAI", - "type": "ChatOpenAI", - "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel", "Runnable"], - "category": "Chat Models", - "description": "Wrapper around OpenAI large language models that use the Chat endpoint", - "inputParams": [ - { - "label": "Connect Credential", - "name": "credential", - "type": "credential", - "credentialNames": ["openAIApi"], - "id": "chatOpenAI_0-input-credential-credential" - }, - { - "label": "Model Name", - "name": "modelName", - "type": "asyncOptions", - "loadMethod": "listModels", - "default": "gpt-3.5-turbo", - "id": "chatOpenAI_0-input-modelName-options" - }, - { - "label": "Temperature", - "name": "temperature", - "type": "number", - "step": 0.1, - "default": 0.9, - "optional": true, - "id": "chatOpenAI_0-input-temperature-number" - }, - { - "label": "Max Tokens", - "name": "maxTokens", - "type": "number", - "step": 1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-maxTokens-number" - }, - { - "label": "Top Probability", - "name": "topP", - "type": "number", - "step": 0.1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-topP-number" - }, - { - "label": "Frequency Penalty", - "name": "frequencyPenalty", - "type": "number", - "step": 0.1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-frequencyPenalty-number" - }, - { - "label": "Presence Penalty", - "name": "presencePenalty", - 
"type": "number", - "step": 0.1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-presencePenalty-number" - }, - { - "label": "Timeout", - "name": "timeout", - "type": "number", - "step": 1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-timeout-number" - }, - { - "label": "BasePath", - "name": "basepath", - "type": "string", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-basepath-string" - }, - { - "label": "BaseOptions", - "name": "baseOptions", - "type": "json", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-baseOptions-json" - }, - { - "label": "Allow Image Uploads", - "name": "allowImageUploads", - "type": "boolean", - "description": "Automatically uses gpt-4-vision-preview when image is being uploaded from chat. Only works with LLMChain, Conversation Chain, ReAct Agent, and Conversational Agent", - "default": false, - "optional": true, - "id": "chatOpenAI_0-input-allowImageUploads-boolean" - }, - { - "label": "Image Resolution", - "description": "This parameter controls the resolution in which the model views the image.", - "name": "imageResolution", - "type": "options", - "options": [ - { - "label": "Low", - "name": "low" - }, - { - "label": "High", - "name": "high" - }, - { - "label": "Auto", - "name": "auto" - } - ], - "default": "low", - "optional": false, - "additionalParams": true, - "id": "chatOpenAI_0-input-imageResolution-options" - } - ], - "inputAnchors": [ - { - "label": "Cache", - "name": "cache", - "type": "BaseCache", - "optional": true, - "id": "chatOpenAI_0-input-cache-BaseCache" - } - ], - "inputs": { - "cache": "", - "modelName": "gpt-3.5-turbo-16k", - "temperature": "0", - "maxTokens": "", - "topP": "", - "frequencyPenalty": "", - "presencePenalty": "", - "timeout": "", - "basepath": "", - "baseOptions": "", - "allowImageUploads": true, - "imageResolution": "low" - }, - "outputAnchors": [ - { - "id": 
"chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", - "name": "chatOpenAI", - "label": "ChatOpenAI", - "type": "ChatOpenAI | BaseChatModel | BaseLanguageModel | Runnable" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 376.92707114970364, - "y": -666.8088336865496 - }, - "dragging": false - }, - { - "width": 300, - "height": 669, - "id": "chatOpenAI_1", - "position": { - "x": 2653.726672579251, - "y": -665.8849139437705 - }, - "type": "customNode", - "data": { - "id": "chatOpenAI_1", - "label": "ChatOpenAI", - "version": 6, - "name": "chatOpenAI", - "type": "ChatOpenAI", - "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel", "Runnable"], - "category": "Chat Models", - "description": "Wrapper around OpenAI large language models that use the Chat endpoint", - "inputParams": [ - { - "label": "Connect Credential", - "name": "credential", - "type": "credential", - "credentialNames": ["openAIApi"], - "id": "chatOpenAI_1-input-credential-credential" - }, - { - "label": "Model Name", - "name": "modelName", - "type": "asyncOptions", - "loadMethod": "listModels", - "default": "gpt-3.5-turbo", - "id": "chatOpenAI_1-input-modelName-options" - }, - { - "label": "Temperature", - "name": "temperature", - "type": "number", - "step": 0.1, - "default": 0.9, - "optional": true, - "id": "chatOpenAI_1-input-temperature-number" - }, - { - "label": "Max Tokens", - "name": "maxTokens", - "type": "number", - "step": 1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_1-input-maxTokens-number" - }, - { - "label": "Top Probability", - "name": "topP", - "type": "number", - "step": 0.1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_1-input-topP-number" - }, - { - "label": "Frequency Penalty", - "name": "frequencyPenalty", - "type": "number", - "step": 0.1, - "optional": true, - "additionalParams": true, - "id": 
"chatOpenAI_1-input-frequencyPenalty-number" - }, - { - "label": "Presence Penalty", - "name": "presencePenalty", - "type": "number", - "step": 0.1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_1-input-presencePenalty-number" - }, - { - "label": "Timeout", - "name": "timeout", - "type": "number", - "step": 1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_1-input-timeout-number" - }, - { - "label": "BasePath", - "name": "basepath", - "type": "string", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_1-input-basepath-string" - }, - { - "label": "BaseOptions", - "name": "baseOptions", - "type": "json", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_1-input-baseOptions-json" - }, - { - "label": "Allow Image Uploads", - "name": "allowImageUploads", - "type": "boolean", - "description": "Automatically uses gpt-4-vision-preview when image is being uploaded from chat. Only works with LLMChain, Conversation Chain, ReAct Agent, and Conversational Agent", - "default": false, - "optional": true, - "id": "chatOpenAI_1-input-allowImageUploads-boolean" - }, - { - "label": "Image Resolution", - "description": "This parameter controls the resolution in which the model views the image.", - "name": "imageResolution", - "type": "options", - "options": [ - { - "label": "Low", - "name": "low" - }, - { - "label": "High", - "name": "high" - }, - { - "label": "Auto", - "name": "auto" - } - ], - "default": "low", - "optional": false, - "additionalParams": true, - "id": "chatOpenAI_1-input-imageResolution-options" - } - ], - "inputAnchors": [ - { - "label": "Cache", - "name": "cache", - "type": "BaseCache", - "optional": true, - "id": "chatOpenAI_1-input-cache-BaseCache" - } - ], - "inputs": { - "cache": "", - "modelName": "gpt-3.5-turbo-16k", - "temperature": "0", - "maxTokens": "", - "topP": "", - "frequencyPenalty": "", - "presencePenalty": "", - "timeout": "", - "basepath": "", - "baseOptions": "", - 
"allowImageUploads": true, - "imageResolution": "low" - }, - "outputAnchors": [ - { - "id": "chatOpenAI_1-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", - "name": "chatOpenAI", - "label": "ChatOpenAI", - "type": "ChatOpenAI | BaseChatModel | BaseLanguageModel | Runnable" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 2653.726672579251, - "y": -665.8849139437705 - }, - "dragging": false - }, - { - "width": 300, - "height": 507, - "id": "llmChain_1", - "position": { - "x": 3089.9937691022837, - "y": -109.24001734925716 - }, - "type": "customNode", - "data": { - "id": "llmChain_1", - "label": "LLM Chain", - "version": 3, - "name": "llmChain", - "type": "LLMChain", - "baseClasses": ["LLMChain", "BaseChain", "Runnable"], - "category": "Chains", - "description": "Chain to run queries against LLMs", - "inputParams": [ - { - "label": "Chain Name", - "name": "chainName", - "type": "string", - "placeholder": "Name Your Chain", - "optional": true, - "id": "llmChain_1-input-chainName-string" - } - ], - "inputAnchors": [ - { - "label": "Language Model", - "name": "model", - "type": "BaseLanguageModel", - "id": "llmChain_1-input-model-BaseLanguageModel" - }, - { - "label": "Prompt", - "name": "prompt", - "type": "BasePromptTemplate", - "id": "llmChain_1-input-prompt-BasePromptTemplate" - }, - { - "label": "Output Parser", - "name": "outputParser", - "type": "BaseLLMOutputParser", - "optional": true, - "id": "llmChain_1-input-outputParser-BaseLLMOutputParser" - }, - { - "label": "Input Moderation", - "description": "Detect text that could generate harmful output and prevent it from being sent to the language model", - "name": "inputModeration", - "type": "Moderation", - "optional": true, - "list": true, - "id": "llmChain_1-input-inputModeration-Moderation" - } - ], - "inputs": { - "model": "{{chatOpenAI_1.data.instance}}", - "prompt": "{{promptTemplate_1.data.instance}}", - "outputParser": "", - 
"inputModeration": "", - "chainName": "Final Chain" - }, - "outputAnchors": [ - { - "name": "output", - "label": "Output", - "type": "options", - "options": [ - { - "id": "llmChain_1-output-llmChain-LLMChain|BaseChain|Runnable", - "name": "llmChain", - "label": "LLM Chain", - "type": "LLMChain | BaseChain | Runnable" - }, - { - "id": "llmChain_1-output-outputPrediction-string|json", - "name": "outputPrediction", - "label": "Output Prediction", - "type": "string | json" - } - ], - "default": "llmChain" - } - ], - "outputs": { - "output": "llmChain" - }, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 3089.9937691022837, - "y": -109.24001734925716 - }, - "dragging": false - }, - { - "width": 300, - "height": 674, - "id": "customFunction_2", - "position": { - "x": -19.95227863012829, - "y": -125.50600296188355 - }, - "type": "customNode", - "data": { - "id": "customFunction_2", - "label": "Custom JS Function", - "version": 2, - "name": "customFunction", - "type": "CustomFunction", - "baseClasses": ["CustomFunction", "Utilities"], - "tags": ["Utilities"], - "category": "Utilities", - "description": "Execute custom javascript function", - "inputParams": [ - { - "label": "Input Variables", - "name": "functionInputVariables", - "description": "Input variables can be used in the function with prefix $. 
For example: $var", - "type": "json", - "optional": true, - "acceptVariable": true, - "list": true, - "id": "customFunction_2-input-functionInputVariables-json" - }, - { - "label": "Function Name", - "name": "functionName", - "type": "string", - "optional": true, - "placeholder": "My Function", - "id": "customFunction_2-input-functionName-string" - }, - { - "label": "Javascript Function", - "name": "javascriptFunction", - "type": "code", - "id": "customFunction_2-input-javascriptFunction-code" - } - ], - "inputAnchors": [], - "inputs": { - "functionInputVariables": "", - "functionName": "Get SQL Schema Prompt", - "javascriptFunction": "const HOST = 'singlestore-host.com';\nconst USER = 'admin';\nconst PASSWORD = 'mypassword';\nconst DATABASE = 'mydb';\nconst TABLE = 'samples';\nconst mysql = require('mysql2/promise');\n\nlet sqlSchemaPrompt;\n\n/**\n * Ideal prompt contains schema info and examples\n * Follows best practices as specified form https://arxiv.org/abs/2204.00498\n * =========================================\n * CREATE TABLE samples (firstName varchar NOT NULL, lastName varchar)\n * SELECT * FROM samples LIMIT 3\n * firstName lastName\n * Stephen Tyler\n * Jack McGinnis\n * Steven Repici\n * =========================================\n*/\nfunction getSQLPrompt() {\n return new Promise(async (resolve, reject) => {\n try {\n const singleStoreConnection = mysql.createPool({\n host: HOST,\n user: USER,\n password: PASSWORD,\n database: DATABASE,\n });\n \n // Get schema info\n const [schemaInfo] = await singleStoreConnection.execute(\n `SELECT * FROM INFORMATION_SCHEMA.COLUMNS WHERE table_name = \"${TABLE}\"`\n );\n \n const createColumns = [];\n const columnNames = [];\n \n for (const schemaData of schemaInfo) {\n columnNames.push(`${schemaData['COLUMN_NAME']}`);\n createColumns.push(`${schemaData['COLUMN_NAME']} ${schemaData['COLUMN_TYPE']} ${schemaData['IS_NULLABLE'] === 'NO' ? 
'NOT NULL' : ''}`);\n }\n \n const sqlCreateTableQuery = `CREATE TABLE samples (${createColumns.join(', ')})`;\n const sqlSelectTableQuery = `SELECT * FROM samples LIMIT 3`;\n \n // Get first 3 rows\n const [rows] = await singleStoreConnection.execute(\n sqlSelectTableQuery,\n );\n \n const allValues = [];\n for (const row of rows) {\n const rowValues = [];\n for (const colName in row) {\n rowValues.push(row[colName]);\n }\n allValues.push(rowValues.join(' '));\n }\n \n sqlSchemaPrompt = sqlCreateTableQuery + '\\n' + sqlSelectTableQuery + '\\n' + columnNames.join(' ') + '\\n' + allValues.join('\\n');\n \n resolve();\n } catch (e) {\n console.error(e);\n return reject(e);\n }\n });\n}\n\nasync function main() {\n await getSQLPrompt();\n}\n\nawait main();\n\nreturn sqlSchemaPrompt;" - }, - "outputAnchors": [ - { - "name": "output", - "label": "Output", - "type": "options", - "description": "", - "options": [ - { - "id": "customFunction_2-output-output-string|number|boolean|json|array", - "name": "output", - "label": "Output", - "description": "", - "type": "string | number | boolean | json | array" - }, - { - "id": "customFunction_2-output-EndingNode-CustomFunction", - "name": "EndingNode", - "label": "Ending Node", - "description": "", - "type": "CustomFunction" - } - ], - "default": "output" - } - ], - "outputs": { - "output": "output" - }, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": -19.95227863012829, - "y": -125.50600296188355 - }, - "dragging": false - }, - { - "width": 300, - "height": 674, - "id": "customFunction_1", - "position": { - "x": 1887.4670208331604, - "y": -275.95340782935716 - }, - "type": "customNode", - "data": { - "id": "customFunction_1", - "label": "Custom JS Function", - "version": 2, - "name": "customFunction", - "type": "CustomFunction", - "baseClasses": ["CustomFunction", "Utilities"], - "tags": ["Utilities"], - "category": "Utilities", - "description": "Execute custom javascript function", - "inputParams": 
[ - { - "label": "Input Variables", - "name": "functionInputVariables", - "description": "Input variables can be used in the function with prefix $. For example: $var", - "type": "json", - "optional": true, - "acceptVariable": true, - "list": true, - "id": "customFunction_1-input-functionInputVariables-json" - }, - { - "label": "Function Name", - "name": "functionName", - "type": "string", - "optional": true, - "placeholder": "My Function", - "id": "customFunction_1-input-functionName-string" - }, - { - "label": "Javascript Function", - "name": "javascriptFunction", - "type": "code", - "id": "customFunction_1-input-javascriptFunction-code" - } - ], - "inputAnchors": [], - "inputs": { - "functionInputVariables": "{\"sqlQuery\":\"{{setVariable_1.data.instance}}\"}", - "functionName": "Run SQL Query", - "javascriptFunction": "const HOST = 'singlestore-host.com';\nconst USER = 'admin';\nconst PASSWORD = 'mypassword';\nconst DATABASE = 'mydb';\nconst TABLE = 'samples';\nconst mysql = require('mysql2/promise');\n\nlet result;\n\nfunction getSQLResult() {\n return new Promise(async (resolve, reject) => {\n try {\n const singleStoreConnection = mysql.createPool({\n host: HOST,\n user: USER,\n password: PASSWORD,\n database: DATABASE,\n });\n \n const [rows] = await singleStoreConnection.execute(\n $sqlQuery\n );\n \n result = JSON.stringify(rows)\n \n resolve();\n } catch (e) {\n console.error(e);\n return reject(e);\n }\n });\n}\n\nasync function main() {\n await getSQLResult();\n}\n\nawait main();\n\nreturn result;" - }, - "outputAnchors": [ - { - "name": "output", - "label": "Output", - "type": "options", - "description": "", - "options": [ - { - "id": "customFunction_1-output-output-string|number|boolean|json|array", - "name": "output", - "label": "Output", - "description": "", - "type": "string | number | boolean | json | array" - }, - { - "id": "customFunction_1-output-EndingNode-CustomFunction", - "name": "EndingNode", - "label": "Ending Node", - "description": "", 
- "type": "CustomFunction" - } - ], - "default": "output" - } - ], - "outputs": { - "output": "output" - }, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 1887.4670208331604, - "y": -275.95340782935716 - }, - "dragging": false - }, - { - "width": 300, - "height": 511, - "id": "promptTemplate_1", - "position": { - "x": 2655.2632506040304, - "y": 218.145615216618 - }, - "type": "customNode", - "data": { - "id": "promptTemplate_1", - "label": "Prompt Template", - "version": 1, - "name": "promptTemplate", - "type": "PromptTemplate", - "baseClasses": ["PromptTemplate", "BaseStringPromptTemplate", "BasePromptTemplate", "Runnable"], - "category": "Prompts", - "description": "Schema to represent a basic prompt for an LLM", - "inputParams": [ - { - "label": "Template", - "name": "template", - "type": "string", - "rows": 4, - "placeholder": "What is a good name for a company that makes {product}?", - "id": "promptTemplate_1-input-template-string" - }, - { - "label": "Format Prompt Values", - "name": "promptValues", - "type": "json", - "optional": true, - "acceptVariable": true, - "list": true, - "id": "promptTemplate_1-input-promptValues-json" - } - ], - "inputAnchors": [], - "inputs": { - "template": "Given the following user question, corresponding SQL query, and SQL result, answer the user question as details as possible.\n\nQuestion: {question}\n\nSQL Query: {sqlQuery}\n\nSQL Result: {sqlResponse}\n\nAnswer:\n", - "promptValues": "{\"question\":\"{{question}}\",\"sqlResponse\":\"{{customFunction_1.data.instance}}\",\"sqlQuery\":\"{{getVariable_1.data.instance}}\"}" - }, - "outputAnchors": [ - { - "id": "promptTemplate_1-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate|Runnable", - "name": "promptTemplate", - "label": "PromptTemplate", - "type": "PromptTemplate | BaseStringPromptTemplate | BasePromptTemplate | Runnable" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "dragging": false, 
- "positionAbsolute": { - "x": 2655.2632506040304, - "y": 218.145615216618 - } - }, - { - "width": 300, - "height": 304, - "id": "getVariable_1", - "position": { - "x": 2272.8555266616872, - "y": 24.11364076336241 - }, - "type": "customNode", - "data": { - "id": "getVariable_1", - "label": "Get Variable", - "version": 2, - "name": "getVariable", - "type": "GetVariable", - "baseClasses": ["GetVariable", "Utilities"], - "tags": ["Utilities"], - "category": "Utilities", - "description": "Get variable that was saved using Set Variable node", - "inputParams": [ - { - "label": "Variable Name", - "name": "variableName", - "type": "string", - "placeholder": "var1", - "id": "getVariable_1-input-variableName-string" - } - ], - "inputAnchors": [], - "inputs": { - "variableName": "sqlQuery" - }, - "outputAnchors": [ - { - "name": "output", - "label": "Output", - "type": "options", - "description": "", - "options": [ - { - "id": "getVariable_1-output-output-string|number|boolean|json|array", - "name": "output", - "label": "Output", - "description": "", - "type": "string | number | boolean | json | array" - } - ], - "default": "output" - } - ], - "outputs": { - "output": "output" - }, - "selected": false - }, - "positionAbsolute": { - "x": 2272.8555266616872, - "y": 24.11364076336241 - }, - "selected": false, - "dragging": false - }, - { - "width": 300, - "height": 355, - "id": "setVariable_1", - "position": { - "x": 1516.338224315744, - "y": -133.6986023683283 - }, - "type": "customNode", - "data": { - "id": "setVariable_1", - "label": "Set Variable", - "version": 2, - "name": "setVariable", - "type": "SetVariable", - "baseClasses": ["SetVariable", "Utilities"], - "tags": ["Utilities"], - "category": "Utilities", - "description": "Set variable which can be retrieved at a later stage. 
Variable is only available during runtime.", - "inputParams": [ - { - "label": "Variable Name", - "name": "variableName", - "type": "string", - "placeholder": "var1", - "id": "setVariable_1-input-variableName-string" - } - ], - "inputAnchors": [ - { - "label": "Input", - "name": "input", - "type": "string | number | boolean | json | array", - "optional": true, - "list": true, - "id": "setVariable_1-input-input-string | number | boolean | json | array" - } - ], - "inputs": { - "input": ["{{ifElseFunction_0.data.instance}}"], - "variableName": "sqlQuery" - }, - "outputAnchors": [ - { - "name": "output", - "label": "Output", - "type": "options", - "description": "", - "options": [ - { - "id": "setVariable_1-output-output-string|number|boolean|json|array", - "name": "output", - "label": "Output", - "description": "", - "type": "string | number | boolean | json | array" - } - ], - "default": "output" - } - ], - "outputs": { - "output": "output" - }, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 1516.338224315744, - "y": -133.6986023683283 - }, - "dragging": false - }, - { - "width": 300, - "height": 765, - "id": "ifElseFunction_0", - "position": { - "x": 1147.8020838770517, - "y": -237.39478763322148 - }, - "type": "customNode", - "data": { - "id": "ifElseFunction_0", - "label": "IfElse Function", - "version": 2, - "name": "ifElseFunction", - "type": "IfElseFunction", - "baseClasses": ["IfElseFunction", "Utilities"], - "tags": ["Utilities"], - "category": "Utilities", - "description": "Split flows based on If Else javascript functions", - "inputParams": [ - { - "label": "Input Variables", - "name": "functionInputVariables", - "description": "Input variables can be used in the function with prefix $. 
For example: $var", - "type": "json", - "optional": true, - "acceptVariable": true, - "list": true, - "id": "ifElseFunction_0-input-functionInputVariables-json" - }, - { - "label": "IfElse Name", - "name": "functionName", - "type": "string", - "optional": true, - "placeholder": "If Condition Match", - "id": "ifElseFunction_0-input-functionName-string" - }, - { - "label": "If Function", - "name": "ifFunction", - "description": "Function must return a value", - "type": "code", - "rows": 2, - "default": "if (\"hello\" == \"hello\") {\n return true;\n}", - "id": "ifElseFunction_0-input-ifFunction-code" - }, - { - "label": "Else Function", - "name": "elseFunction", - "description": "Function must return a value", - "type": "code", - "rows": 2, - "default": "return false;", - "id": "ifElseFunction_0-input-elseFunction-code" - } - ], - "inputAnchors": [], - "inputs": { - "functionInputVariables": "{\"sqlQuery\":\"{{llmChain_0.data.instance}}\"}", - "functionName": "IF SQL Query contains SELECT and WHERE", - "ifFunction": "const sqlQuery = $sqlQuery.trim();\n\nconst regex = /SELECT\\s.*?(?:\\n|$)/gi;\n\n// Extracting the SQL part\nconst matches = sqlQuery.match(regex);\nconst cleanSql = matches ? 
matches[0].trim() : \"\";\n\nif (cleanSql.includes(\"SELECT\") && cleanSql.includes(\"WHERE\")) {\n return cleanSql;\n}", - "elseFunction": "return $sqlQuery;" - }, - "outputAnchors": [ - { - "name": "output", - "label": "Output", - "type": "options", - "description": "", - "options": [ - { - "id": "ifElseFunction_0-output-returnTrue-string|number|boolean|json|array", - "name": "returnTrue", - "label": "True", - "description": "", - "type": "string | number | boolean | json | array" - }, - { - "id": "ifElseFunction_0-output-returnFalse-string|number|boolean|json|array", - "name": "returnFalse", - "label": "False", - "description": "", - "type": "string | number | boolean | json | array" - } - ], - "default": "returnTrue" - } - ], - "outputs": { - "output": "returnTrue" - }, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 1147.8020838770517, - "y": -237.39478763322148 - }, - "dragging": false - }, - { - "width": 300, - "height": 511, - "id": "promptTemplate_2", - "position": { - "x": 1193.7489579044463, - "y": 615.4009446588724 - }, - "type": "customNode", - "data": { - "id": "promptTemplate_2", - "label": "Prompt Template", - "version": 1, - "name": "promptTemplate", - "type": "PromptTemplate", - "baseClasses": ["PromptTemplate", "BaseStringPromptTemplate", "BasePromptTemplate", "Runnable"], - "category": "Prompts", - "description": "Schema to represent a basic prompt for an LLM", - "inputParams": [ - { - "label": "Template", - "name": "template", - "type": "string", - "rows": 4, - "placeholder": "What is a good name for a company that makes {product}?", - "id": "promptTemplate_2-input-template-string" - }, - { - "label": "Format Prompt Values", - "name": "promptValues", - "type": "json", - "optional": true, - "acceptVariable": true, - "list": true, - "id": "promptTemplate_2-input-promptValues-json" - } - ], - "inputAnchors": [], - "inputs": { - "template": "Politely say \"I'm not able to answer query\"", - "promptValues": 
"{\"schema\":\"{{setVariable_0.data.instance}}\",\"question\":\"{{question}}\"}" - }, - "outputAnchors": [ - { - "id": "promptTemplate_2-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate|Runnable", - "name": "promptTemplate", - "label": "PromptTemplate", - "type": "PromptTemplate | BaseStringPromptTemplate | BasePromptTemplate | Runnable" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 1193.7489579044463, - "y": 615.4009446588724 - }, - "dragging": false - }, - { - "width": 300, - "height": 669, - "id": "chatOpenAI_2", - "position": { - "x": 1545.1023725538003, - "y": 493.5495798408175 - }, - "type": "customNode", - "data": { - "id": "chatOpenAI_2", - "label": "ChatOpenAI", - "version": 6, - "name": "chatOpenAI", - "type": "ChatOpenAI", - "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel", "Runnable"], - "category": "Chat Models", - "description": "Wrapper around OpenAI large language models that use the Chat endpoint", - "inputParams": [ - { - "label": "Connect Credential", - "name": "credential", - "type": "credential", - "credentialNames": ["openAIApi"], - "id": "chatOpenAI_2-input-credential-credential" - }, - { - "label": "Model Name", - "name": "modelName", - "type": "asyncOptions", - "loadMethod": "listModels", - "default": "gpt-3.5-turbo", - "id": "chatOpenAI_2-input-modelName-options" - }, - { - "label": "Temperature", - "name": "temperature", - "type": "number", - "step": 0.1, - "default": 0.9, - "optional": true, - "id": "chatOpenAI_2-input-temperature-number" - }, - { - "label": "Max Tokens", - "name": "maxTokens", - "type": "number", - "step": 1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_2-input-maxTokens-number" - }, - { - "label": "Top Probability", - "name": "topP", - "type": "number", - "step": 0.1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_2-input-topP-number" - }, - { - "label": "Frequency 
Penalty", - "name": "frequencyPenalty", - "type": "number", - "step": 0.1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_2-input-frequencyPenalty-number" - }, - { - "label": "Presence Penalty", - "name": "presencePenalty", - "type": "number", - "step": 0.1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_2-input-presencePenalty-number" - }, - { - "label": "Timeout", - "name": "timeout", - "type": "number", - "step": 1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_2-input-timeout-number" - }, - { - "label": "BasePath", - "name": "basepath", - "type": "string", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_2-input-basepath-string" - }, - { - "label": "BaseOptions", - "name": "baseOptions", - "type": "json", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_2-input-baseOptions-json" - }, - { - "label": "Allow Image Uploads", - "name": "allowImageUploads", - "type": "boolean", - "description": "Automatically uses gpt-4-vision-preview when image is being uploaded from chat. 
Only works with LLMChain, Conversation Chain, ReAct Agent, and Conversational Agent", - "default": false, - "optional": true, - "id": "chatOpenAI_2-input-allowImageUploads-boolean" - }, - { - "label": "Image Resolution", - "description": "This parameter controls the resolution in which the model views the image.", - "name": "imageResolution", - "type": "options", - "options": [ - { - "label": "Low", - "name": "low" - }, - { - "label": "High", - "name": "high" - }, - { - "label": "Auto", - "name": "auto" - } - ], - "default": "low", - "optional": false, - "additionalParams": true, - "id": "chatOpenAI_2-input-imageResolution-options" - } - ], - "inputAnchors": [ - { - "label": "Cache", - "name": "cache", - "type": "BaseCache", - "optional": true, - "id": "chatOpenAI_2-input-cache-BaseCache" - } - ], - "inputs": { - "cache": "", - "modelName": "gpt-3.5-turbo-16k", - "temperature": "0.7", - "maxTokens": "", - "topP": "", - "frequencyPenalty": "", - "presencePenalty": "", - "timeout": "", - "basepath": "", - "baseOptions": "", - "allowImageUploads": true, - "imageResolution": "low" - }, - "outputAnchors": [ - { - "id": "chatOpenAI_2-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", - "name": "chatOpenAI", - "label": "ChatOpenAI", - "type": "ChatOpenAI | BaseChatModel | BaseLanguageModel | Runnable" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 1545.1023725538003, - "y": 493.5495798408175 - }, - "dragging": false - }, - { - "width": 300, - "height": 507, - "id": "llmChain_2", - "position": { - "x": 1914.509823868027, - "y": 622.3435967391327 - }, - "type": "customNode", - "data": { - "id": "llmChain_2", - "label": "LLM Chain", - "version": 3, - "name": "llmChain", - "type": "LLMChain", - "baseClasses": ["LLMChain", "BaseChain", "Runnable"], - "category": "Chains", - "description": "Chain to run queries against LLMs", - "inputParams": [ - { - "label": "Chain Name", - "name": "chainName", - 
"type": "string", - "placeholder": "Name Your Chain", - "optional": true, - "id": "llmChain_2-input-chainName-string" - } - ], - "inputAnchors": [ - { - "label": "Language Model", - "name": "model", - "type": "BaseLanguageModel", - "id": "llmChain_2-input-model-BaseLanguageModel" - }, - { - "label": "Prompt", - "name": "prompt", - "type": "BasePromptTemplate", - "id": "llmChain_2-input-prompt-BasePromptTemplate" - }, - { - "label": "Output Parser", - "name": "outputParser", - "type": "BaseLLMOutputParser", - "optional": true, - "id": "llmChain_2-input-outputParser-BaseLLMOutputParser" - }, - { - "label": "Input Moderation", - "description": "Detect text that could generate harmful output and prevent it from being sent to the language model", - "name": "inputModeration", - "type": "Moderation", - "optional": true, - "list": true, - "id": "llmChain_2-input-inputModeration-Moderation" - } - ], - "inputs": { - "model": "{{chatOpenAI_2.data.instance}}", - "prompt": "{{promptTemplate_2.data.instance}}", - "outputParser": "", - "inputModeration": "", - "chainName": "Fallback Chain" - }, - "outputAnchors": [ - { - "name": "output", - "label": "Output", - "type": "options", - "options": [ - { - "id": "llmChain_2-output-llmChain-LLMChain|BaseChain|Runnable", - "name": "llmChain", - "label": "LLM Chain", - "type": "LLMChain | BaseChain | Runnable" - }, - { - "id": "llmChain_2-output-outputPrediction-string|json", - "name": "outputPrediction", - "label": "Output Prediction", - "type": "string | json" - } - ], - "default": "llmChain" - } - ], - "outputs": { - "output": "llmChain" - }, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 1914.509823868027, - "y": 622.3435967391327 - }, - "dragging": false - }, - { - "id": "stickyNote_0", - "position": { - "x": -18.950231412347364, - "y": -192.2980180516393 - }, - "type": "stickyNote", - "data": { - "id": "stickyNote_0", - "label": "Sticky Note", - "version": 2, - "name": "stickyNote", - "type": 
"StickyNote", - "baseClasses": ["StickyNote"], - "tags": ["Utilities"], - "category": "Utilities", - "description": "Add a sticky note", - "inputParams": [ - { - "label": "", - "name": "note", - "type": "string", - "rows": 1, - "placeholder": "Type something here", - "optional": true, - "id": "stickyNote_0-input-note-string" - } - ], - "inputAnchors": [], - "inputs": { - "note": "First, get SQL database schema" - }, - "outputAnchors": [ - { - "id": "stickyNote_0-output-stickyNote-StickyNote", - "name": "stickyNote", - "label": "StickyNote", - "description": "Add a sticky note", - "type": "StickyNote" - } - ], - "outputs": {}, - "selected": false - }, - "width": 300, - "height": 42, - "selected": false, - "positionAbsolute": { - "x": -18.950231412347364, - "y": -192.2980180516393 - }, - "dragging": false - }, - { - "id": "stickyNote_1", - "position": { - "x": 1510.6324834799852, - "y": -221.78240261184442 - }, - "type": "stickyNote", - "data": { - "id": "stickyNote_1", - "label": "Sticky Note", - "version": 2, - "name": "stickyNote", - "type": "StickyNote", - "baseClasses": ["StickyNote"], - "tags": ["Utilities"], - "category": "Utilities", - "description": "Add a sticky note", - "inputParams": [ - { - "label": "", - "name": "note", - "type": "string", - "rows": 1, - "placeholder": "Type something here", - "optional": true, - "id": "stickyNote_1-input-note-string" - } - ], - "inputAnchors": [], - "inputs": { - "note": "Save as variable to be used at the last Prompt Template" - }, - "outputAnchors": [ - { - "id": "stickyNote_1-output-stickyNote-StickyNote", - "name": "stickyNote", - "label": "StickyNote", - "description": "Add a sticky note", - "type": "StickyNote" - } - ], - "outputs": {}, - "selected": false - }, - "width": 300, - "height": 62, - "selected": false, - "positionAbsolute": { - "x": 1510.6324834799852, - "y": -221.78240261184442 - }, - "dragging": false - }, - { - "id": "stickyNote_2", - "position": { - "x": 386.88037412001086, - "y": 47.66735767574478 
- }, - "type": "stickyNote", - "data": { - "id": "stickyNote_2", - "label": "Sticky Note", - "version": 2, - "name": "stickyNote", - "type": "StickyNote", - "baseClasses": ["StickyNote"], - "tags": ["Utilities"], - "category": "Utilities", - "description": "Add a sticky note", - "inputParams": [ - { - "label": "", - "name": "note", - "type": "string", - "rows": 1, - "placeholder": "Type something here", - "optional": true, - "id": "stickyNote_2-input-note-string" - } - ], - "inputAnchors": [], - "inputs": { - "note": "Instruct LLM to return a SQL query using the schema.\n\nRecommend to give few examples for higher accuracy. \n\nChange the prompt accordingly to suit the type of database you are using" - }, - "outputAnchors": [ - { - "id": "stickyNote_2-output-stickyNote-StickyNote", - "name": "stickyNote", - "label": "StickyNote", - "description": "Add a sticky note", - "type": "StickyNote" - } - ], - "outputs": {}, - "selected": false - }, - "width": 300, - "height": 183, - "selected": false, - "positionAbsolute": { - "x": 386.88037412001086, - "y": 47.66735767574478 - }, - "dragging": false - }, - { - "id": "stickyNote_3", - "position": { - "x": 1148.366177280569, - "y": -330.2148999791981 - }, - "type": "stickyNote", - "data": { - "id": "stickyNote_3", - "label": "Sticky Note", - "version": 2, - "name": "stickyNote", - "type": "StickyNote", - "baseClasses": ["StickyNote"], - "tags": ["Utilities"], - "category": "Utilities", - "description": "Add a sticky note", - "inputParams": [ - { - "label": "", - "name": "note", - "type": "string", - "rows": 1, - "placeholder": "Type something here", - "optional": true, - "id": "stickyNote_3-input-note-string" - } - ], - "inputAnchors": [], - "inputs": { - "note": "Check if SQL Query is valid\n\nIf not, avoid executing it and return to user " - }, - "outputAnchors": [ - { - "id": "stickyNote_3-output-stickyNote-StickyNote", - "name": "stickyNote", - "label": "StickyNote", - "description": "Add a sticky note", - "type": 
"StickyNote" - } - ], - "outputs": {}, - "selected": false - }, - "width": 300, - "height": 82, - "selected": false, - "positionAbsolute": { - "x": 1148.366177280569, - "y": -330.2148999791981 - }, - "dragging": false - }, - { - "id": "stickyNote_4", - "position": { - "x": 1881.2554569013519, - "y": -435.79147130381756 - }, - "type": "stickyNote", - "data": { - "id": "stickyNote_4", - "label": "Sticky Note", - "version": 2, - "name": "stickyNote", - "type": "StickyNote", - "baseClasses": ["StickyNote"], - "tags": ["Utilities"], - "category": "Utilities", - "description": "Add a sticky note", - "inputParams": [ - { - "label": "", - "name": "note", - "type": "string", - "rows": 1, - "placeholder": "Type something here", - "optional": true, - "id": "stickyNote_4-input-note-string" - } - ], - "inputAnchors": [], - "inputs": { - "note": "Execute the SQL query after validated, and get the list of results back.\n\nTo avoid long list of results overflowing token limit, try capping the length of result here" - }, - "outputAnchors": [ - { - "id": "stickyNote_4-output-stickyNote-StickyNote", - "name": "stickyNote", - "label": "StickyNote", - "description": "Add a sticky note", - "type": "StickyNote" - } - ], - "outputs": {}, - "selected": false - }, - "width": 300, - "height": 143, - "selected": false, - "positionAbsolute": { - "x": 1881.2554569013519, - "y": -435.79147130381756 - }, - "dragging": false - }, - { - "id": "stickyNote_5", - "position": { - "x": 1545.0242031958799, - "y": 428.37859733277077 - }, - "type": "stickyNote", - "data": { - "id": "stickyNote_5", - "label": "Sticky Note", - "version": 2, - "name": "stickyNote", - "type": "StickyNote", - "baseClasses": ["StickyNote"], - "tags": ["Utilities"], - "category": "Utilities", - "description": "Add a sticky note", - "inputParams": [ - { - "label": "", - "name": "note", - "type": "string", - "rows": 1, - "placeholder": "Type something here", - "optional": true, - "id": "stickyNote_5-input-note-string" - } - ], - 
"inputAnchors": [], - "inputs": { - "note": "Fallback answer if SQL query is not valid" - }, - "outputAnchors": [ - { - "id": "stickyNote_5-output-stickyNote-StickyNote", - "name": "stickyNote", - "label": "StickyNote", - "description": "Add a sticky note", - "type": "StickyNote" - } - ], - "outputs": {}, - "selected": false - }, - "width": 300, - "height": 42, - "selected": false, - "positionAbsolute": { - "x": 1545.0242031958799, - "y": 428.37859733277077 - }, - "dragging": false - }, - { - "id": "stickyNote_6", - "position": { - "x": 2653.037036258241, - "y": 53.55638699917168 - }, - "type": "stickyNote", - "data": { - "id": "stickyNote_6", - "label": "Sticky Note", - "version": 2, - "name": "stickyNote", - "type": "StickyNote", - "baseClasses": ["StickyNote"], - "tags": ["Utilities"], - "category": "Utilities", - "description": "Add a sticky note", - "inputParams": [ - { - "label": "", - "name": "note", - "type": "string", - "rows": 1, - "placeholder": "Type something here", - "optional": true, - "id": "stickyNote_6-input-note-string" - } - ], - "inputAnchors": [], - "inputs": { - "note": "This is the final prompt.\n\nCombine the following:\nQuestion + SQL query + SQL result\n\nto generate a final answer" - }, - "outputAnchors": [ - { - "id": "stickyNote_6-output-stickyNote-StickyNote", - "name": "stickyNote", - "label": "StickyNote", - "description": "Add a sticky note", - "type": "StickyNote" - } - ], - "outputs": {}, - "selected": false - }, - "width": 300, - "height": 143, - "selected": false, - "positionAbsolute": { - "x": 2653.037036258241, - "y": 53.55638699917168 - }, - "dragging": false - }, - { - "id": "stickyNote_7", - "position": { - "x": 2267.355938520518, - "y": -56.64296923028309 - }, - "type": "stickyNote", - "data": { - "id": "stickyNote_7", - "label": "Sticky Note", - "version": 2, - "name": "stickyNote", - "type": "StickyNote", - "baseClasses": ["StickyNote"], - "tags": ["Utilities"], - "category": "Utilities", - "description": "Add a sticky 
note", - "inputParams": [ - { - "label": "", - "name": "note", - "type": "string", - "rows": 1, - "placeholder": "Type something here", - "optional": true, - "id": "stickyNote_7-input-note-string" - } - ], - "inputAnchors": [], - "inputs": { - "note": "Get the saved variable value to be used in prompt" - }, - "outputAnchors": [ - { - "id": "stickyNote_7-output-stickyNote-StickyNote", - "name": "stickyNote", - "label": "StickyNote", - "description": "Add a sticky note", - "type": "StickyNote" - } - ], - "outputs": {}, - "selected": false - }, - "width": 300, - "height": 62, - "selected": false, - "positionAbsolute": { - "x": 2267.355938520518, - "y": -56.64296923028309 - }, - "dragging": false - } - ], - "edges": [ - { - "source": "promptTemplate_0", - "sourceHandle": "promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate|Runnable", - "target": "llmChain_0", - "targetHandle": "llmChain_0-input-prompt-BasePromptTemplate", - "type": "buttonedge", - "id": "promptTemplate_0-promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate|Runnable-llmChain_0-llmChain_0-input-prompt-BasePromptTemplate", - "data": { - "label": "" - } - }, - { - "source": "chatOpenAI_0", - "sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", - "target": "llmChain_0", - "targetHandle": "llmChain_0-input-model-BaseLanguageModel", - "type": "buttonedge", - "id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable-llmChain_0-llmChain_0-input-model-BaseLanguageModel", - "data": { - "label": "" - } - }, - { - "source": "chatOpenAI_1", - "sourceHandle": "chatOpenAI_1-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", - "target": "llmChain_1", - "targetHandle": "llmChain_1-input-model-BaseLanguageModel", - "type": "buttonedge", - "id": 
"chatOpenAI_1-chatOpenAI_1-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable-llmChain_1-llmChain_1-input-model-BaseLanguageModel", - "data": { - "label": "" - } - }, - { - "source": "customFunction_1", - "sourceHandle": "customFunction_1-output-output-string|number|boolean|json|array", - "target": "promptTemplate_1", - "targetHandle": "promptTemplate_1-input-promptValues-json", - "type": "buttonedge", - "id": "customFunction_1-customFunction_1-output-output-string|number|boolean|json|array-promptTemplate_1-promptTemplate_1-input-promptValues-json", - "data": { - "label": "" - } - }, - { - "source": "promptTemplate_1", - "sourceHandle": "promptTemplate_1-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate|Runnable", - "target": "llmChain_1", - "targetHandle": "llmChain_1-input-prompt-BasePromptTemplate", - "type": "buttonedge", - "id": "promptTemplate_1-promptTemplate_1-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate|Runnable-llmChain_1-llmChain_1-input-prompt-BasePromptTemplate", - "data": { - "label": "" - } - }, - { - "source": "getVariable_1", - "sourceHandle": "getVariable_1-output-output-string|number|boolean|json|array", - "target": "promptTemplate_1", - "targetHandle": "promptTemplate_1-input-promptValues-json", - "type": "buttonedge", - "id": "getVariable_1-getVariable_1-output-output-string|number|boolean|json|array-promptTemplate_1-promptTemplate_1-input-promptValues-json", - "data": { - "label": "" - } - }, - { - "source": "setVariable_1", - "sourceHandle": "setVariable_1-output-output-string|number|boolean|json|array", - "target": "customFunction_1", - "targetHandle": "customFunction_1-input-functionInputVariables-json", - "type": "buttonedge", - "id": "setVariable_1-setVariable_1-output-output-string|number|boolean|json|array-customFunction_1-customFunction_1-input-functionInputVariables-json", - "data": { - "label": "" - } - }, - { - "source": "llmChain_0", - 
"sourceHandle": "llmChain_0-output-outputPrediction-string|json", - "target": "ifElseFunction_0", - "targetHandle": "ifElseFunction_0-input-functionInputVariables-json", - "type": "buttonedge", - "id": "llmChain_0-llmChain_0-output-outputPrediction-string|json-ifElseFunction_0-ifElseFunction_0-input-functionInputVariables-json" - }, - { - "source": "ifElseFunction_0", - "sourceHandle": "ifElseFunction_0-output-returnTrue-string|number|boolean|json|array", - "target": "setVariable_1", - "targetHandle": "setVariable_1-input-input-string | number | boolean | json | array", - "type": "buttonedge", - "id": "ifElseFunction_0-ifElseFunction_0-output-returnTrue-string|number|boolean|json|array-setVariable_1-setVariable_1-input-input-string | number | boolean | json | array" - }, - { - "source": "ifElseFunction_0", - "sourceHandle": "ifElseFunction_0-output-returnFalse-string|number|boolean|json|array", - "target": "promptTemplate_2", - "targetHandle": "promptTemplate_2-input-promptValues-json", - "type": "buttonedge", - "id": "ifElseFunction_0-ifElseFunction_0-output-returnFalse-string|number|boolean|json|array-promptTemplate_2-promptTemplate_2-input-promptValues-json" - }, - { - "source": "chatOpenAI_2", - "sourceHandle": "chatOpenAI_2-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", - "target": "llmChain_2", - "targetHandle": "llmChain_2-input-model-BaseLanguageModel", - "type": "buttonedge", - "id": "chatOpenAI_2-chatOpenAI_2-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable-llmChain_2-llmChain_2-input-model-BaseLanguageModel" - }, - { - "source": "promptTemplate_2", - "sourceHandle": "promptTemplate_2-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate|Runnable", - "target": "llmChain_2", - "targetHandle": "llmChain_2-input-prompt-BasePromptTemplate", - "type": "buttonedge", - "id": 
"promptTemplate_2-promptTemplate_2-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate|Runnable-llmChain_2-llmChain_2-input-prompt-BasePromptTemplate" - }, - { - "source": "customFunction_2", - "sourceHandle": "customFunction_2-output-output-string|number|boolean|json|array", - "target": "promptTemplate_0", - "targetHandle": "promptTemplate_0-input-promptValues-json", - "type": "buttonedge", - "id": "customFunction_2-customFunction_2-output-output-string|number|boolean|json|array-promptTemplate_0-promptTemplate_0-input-promptValues-json" - } - ] -} diff --git a/packages/server/marketplaces/chatflows/Simple Chat Engine.json b/packages/server/marketplaces/chatflows/Simple Chat Engine.json index 5b9429cc0..5aa600cb5 100644 --- a/packages/server/marketplaces/chatflows/Simple Chat Engine.json +++ b/packages/server/marketplaces/chatflows/Simple Chat Engine.json @@ -1,6 +1,6 @@ { "description": "Simple chat engine to handle back and forth conversations using LlamaIndex", - "usecases": ["Chatbot"], + "usecases": ["Basic"], "framework": ["LlamaIndex"], "nodes": [ { diff --git a/packages/server/marketplaces/chatflows/Structured Output Parser.json b/packages/server/marketplaces/chatflows/Structured Output Parser.json deleted file mode 100644 index e80676322..000000000 --- a/packages/server/marketplaces/chatflows/Structured Output Parser.json +++ /dev/null @@ -1,490 +0,0 @@ -{ - "description": "Return response as a specified JSON structure instead of a string/text", - "framework": ["Langchain"], - "usecases": ["Extraction"], - "nodes": [ - { - "width": 300, - "height": 574, - "id": "chatOpenAI_0", - "position": { - "x": 845.3961479115309, - "y": -205.74401580699953 - }, - "type": "customNode", - "data": { - "id": "chatOpenAI_0", - "label": "ChatOpenAI", - "version": 6.0, - "name": "chatOpenAI", - "type": "ChatOpenAI", - "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel", "Runnable"], - "category": "Chat Models", - "description": 
"Wrapper around OpenAI large language models that use the Chat endpoint", - "inputParams": [ - { - "label": "Connect Credential", - "name": "credential", - "type": "credential", - "credentialNames": ["openAIApi"], - "id": "chatOpenAI_0-input-credential-credential" - }, - { - "label": "Model Name", - "name": "modelName", - "type": "asyncOptions", - "loadMethod": "listModels", - "default": "gpt-3.5-turbo", - "id": "chatOpenAI_0-input-modelName-options" - }, - { - "label": "Temperature", - "name": "temperature", - "type": "number", - "step": 0.1, - "default": 0.9, - "optional": true, - "id": "chatOpenAI_0-input-temperature-number" - }, - { - "label": "Max Tokens", - "name": "maxTokens", - "type": "number", - "step": 1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-maxTokens-number" - }, - { - "label": "Top Probability", - "name": "topP", - "type": "number", - "step": 0.1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-topP-number" - }, - { - "label": "Frequency Penalty", - "name": "frequencyPenalty", - "type": "number", - "step": 0.1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-frequencyPenalty-number" - }, - { - "label": "Presence Penalty", - "name": "presencePenalty", - "type": "number", - "step": 0.1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-presencePenalty-number" - }, - { - "label": "Timeout", - "name": "timeout", - "type": "number", - "step": 1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-timeout-number" - }, - { - "label": "BasePath", - "name": "basepath", - "type": "string", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-basepath-string" - }, - { - "label": "BaseOptions", - "name": "baseOptions", - "type": "json", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-baseOptions-json" - }, - { - "label": "Allow Image Uploads", - "name": 
"allowImageUploads", - "type": "boolean", - "description": "Automatically uses gpt-4-vision-preview when image is being uploaded from chat. Only works with LLMChain, Conversation Chain, ReAct Agent, and Conversational Agent", - "default": false, - "optional": true, - "id": "chatOpenAI_0-input-allowImageUploads-boolean" - }, - { - "label": "Image Resolution", - "description": "This parameter controls the resolution in which the model views the image.", - "name": "imageResolution", - "type": "options", - "options": [ - { - "label": "Low", - "name": "low" - }, - { - "label": "High", - "name": "high" - }, - { - "label": "Auto", - "name": "auto" - } - ], - "default": "low", - "optional": false, - "additionalParams": true, - "id": "chatOpenAI_0-input-imageResolution-options" - } - ], - "inputAnchors": [ - { - "label": "Cache", - "name": "cache", - "type": "BaseCache", - "optional": true, - "id": "chatOpenAI_0-input-cache-BaseCache" - } - ], - "inputs": { - "cache": "", - "modelName": "gpt-3.5-turbo", - "temperature": "0", - "maxTokens": "", - "topP": "", - "frequencyPenalty": "", - "presencePenalty": "", - "timeout": "", - "basepath": "", - "baseOptions": "", - "allowImageUploads": true, - "imageResolution": "low" - }, - "outputAnchors": [ - { - "id": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", - "name": "chatOpenAI", - "label": "ChatOpenAI", - "type": "ChatOpenAI | BaseChatModel | BaseLanguageModel | Runnable" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 845.3961479115309, - "y": -205.74401580699953 - }, - "dragging": false - }, - { - "width": 300, - "height": 456, - "id": "llmChain_0", - "position": { - "x": 1229.1699649849293, - "y": 245.55173505632646 - }, - "type": "customNode", - "data": { - "id": "llmChain_0", - "label": "LLM Chain", - "version": 3, - "name": "llmChain", - "type": "LLMChain", - "baseClasses": ["LLMChain", "BaseChain", "Runnable"], - "category": 
"Chains", - "description": "Chain to run queries against LLMs", - "inputParams": [ - { - "label": "Chain Name", - "name": "chainName", - "type": "string", - "placeholder": "Name Your Chain", - "optional": true, - "id": "llmChain_0-input-chainName-string" - } - ], - "inputAnchors": [ - { - "label": "Language Model", - "name": "model", - "type": "BaseLanguageModel", - "id": "llmChain_0-input-model-BaseLanguageModel" - }, - { - "label": "Prompt", - "name": "prompt", - "type": "BasePromptTemplate", - "id": "llmChain_0-input-prompt-BasePromptTemplate" - }, - { - "label": "Output Parser", - "name": "outputParser", - "type": "BaseLLMOutputParser", - "optional": true, - "id": "llmChain_0-input-outputParser-BaseLLMOutputParser" - }, - { - "label": "Input Moderation", - "description": "Detect text that could generate harmful output and prevent it from being sent to the language model", - "name": "inputModeration", - "type": "Moderation", - "optional": true, - "list": true, - "id": "llmChain_0-input-inputModeration-Moderation" - } - ], - "inputs": { - "model": "{{chatOpenAI_0.data.instance}}", - "prompt": "{{chatPromptTemplate_0.data.instance}}", - "outputParser": "{{structuredOutputParser_0.data.instance}}", - "chainName": "", - "inputModeration": "" - }, - "outputAnchors": [ - { - "name": "output", - "label": "Output", - "type": "options", - "options": [ - { - "id": "llmChain_0-output-llmChain-LLMChain|BaseChain|Runnable", - "name": "llmChain", - "label": "LLM Chain", - "type": "LLMChain | BaseChain | Runnable" - }, - { - "id": "llmChain_0-output-outputPrediction-string|json", - "name": "outputPrediction", - "label": "Output Prediction", - "type": "string | json" - } - ], - "default": "llmChain" - } - ], - "outputs": { - "output": "llmChain" - }, - "selected": false - }, - "positionAbsolute": { - "x": 1229.1699649849293, - "y": 245.55173505632646 - }, - "selected": false - }, - { - "width": 300, - "height": 652, - "id": "chatPromptTemplate_0", - "position": { - "x": 
501.1597501123828, - "y": -154.43917602832562 - }, - "type": "customNode", - "data": { - "id": "chatPromptTemplate_0", - "label": "Chat Prompt Template", - "version": 1, - "name": "chatPromptTemplate", - "type": "ChatPromptTemplate", - "baseClasses": ["ChatPromptTemplate", "BaseChatPromptTemplate", "BasePromptTemplate", "Runnable"], - "category": "Prompts", - "description": "Schema to represent a chat prompt", - "inputParams": [ - { - "label": "System Message", - "name": "systemMessagePrompt", - "type": "string", - "rows": 4, - "placeholder": "You are a helpful assistant that translates {input_language} to {output_language}.", - "id": "chatPromptTemplate_0-input-systemMessagePrompt-string" - }, - { - "label": "Human Message", - "name": "humanMessagePrompt", - "type": "string", - "rows": 4, - "placeholder": "{text}", - "id": "chatPromptTemplate_0-input-humanMessagePrompt-string" - }, - { - "label": "Format Prompt Values", - "name": "promptValues", - "type": "json", - "optional": true, - "acceptVariable": true, - "list": true, - "id": "chatPromptTemplate_0-input-promptValues-json" - } - ], - "inputAnchors": [], - "inputs": { - "systemMessagePrompt": "Answer user's question as best you can", - "humanMessagePrompt": "{text}", - "promptValues": "" - }, - "outputAnchors": [ - { - "id": "chatPromptTemplate_0-output-chatPromptTemplate-ChatPromptTemplate|BaseChatPromptTemplate|BasePromptTemplate|Runnable", - "name": "chatPromptTemplate", - "label": "ChatPromptTemplate", - "type": "ChatPromptTemplate | BaseChatPromptTemplate | BasePromptTemplate | Runnable" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 501.1597501123828, - "y": -154.43917602832562 - }, - "dragging": false - }, - { - "width": 300, - "height": 328, - "id": "structuredOutputParser_0", - "position": { - "x": 170.3869571939727, - "y": 343.9298288967859 - }, - "type": "customNode", - "data": { - "id": "structuredOutputParser_0", - "label": "Structured Output 
Parser", - "version": 1, - "name": "structuredOutputParser", - "type": "StructuredOutputParser", - "baseClasses": ["StructuredOutputParser", "BaseLLMOutputParser", "Runnable"], - "category": "Output Parsers", - "description": "Parse the output of an LLM call into a given (JSON) structure.", - "inputParams": [ - { - "label": "Autofix", - "name": "autofixParser", - "type": "boolean", - "optional": true, - "description": "In the event that the first call fails, will make another call to the model to fix any errors.", - "id": "structuredOutputParser_0-input-autofixParser-boolean" - }, - { - "label": "JSON Structure", - "name": "jsonStructure", - "type": "datagrid", - "description": "JSON structure for LLM to return", - "datagrid": [ - { - "field": "property", - "headerName": "Property", - "editable": true - }, - { - "field": "type", - "headerName": "Type", - "type": "singleSelect", - "valueOptions": ["string", "number", "boolean"], - "editable": true - }, - { - "field": "description", - "headerName": "Description", - "editable": true, - "flex": 1 - } - ], - "default": [ - { - "property": "answer", - "type": "string", - "description": "answer to the user's question" - }, - { - "property": "source", - "type": "string", - "description": "sources used to answer the question, should be websites" - } - ], - "additionalParams": true, - "id": "structuredOutputParser_0-input-jsonStructure-datagrid" - } - ], - "inputAnchors": [], - "inputs": { - "autofixParser": true, - "jsonStructure": [ - { - "property": "answer", - "type": "string", - "description": "answer to the user's question" - }, - { - "property": "source", - "type": "string", - "description": "sources used to answer the question, should be websites" - } - ] - }, - "outputAnchors": [ - { - "id": "structuredOutputParser_0-output-structuredOutputParser-StructuredOutputParser|BaseLLMOutputParser|Runnable", - "name": "structuredOutputParser", - "label": "StructuredOutputParser", - "type": "StructuredOutputParser | 
BaseLLMOutputParser | Runnable" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 170.3869571939727, - "y": 343.9298288967859 - }, - "dragging": false - } - ], - "edges": [ - { - "source": "chatOpenAI_0", - "sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", - "target": "llmChain_0", - "targetHandle": "llmChain_0-input-model-BaseLanguageModel", - "type": "buttonedge", - "id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable-llmChain_0-llmChain_0-input-model-BaseLanguageModel", - "data": { - "label": "" - } - }, - { - "source": "chatPromptTemplate_0", - "sourceHandle": "chatPromptTemplate_0-output-chatPromptTemplate-ChatPromptTemplate|BaseChatPromptTemplate|BasePromptTemplate|Runnable", - "target": "llmChain_0", - "targetHandle": "llmChain_0-input-prompt-BasePromptTemplate", - "type": "buttonedge", - "id": "chatPromptTemplate_0-chatPromptTemplate_0-output-chatPromptTemplate-ChatPromptTemplate|BaseChatPromptTemplate|BasePromptTemplate|Runnable-llmChain_0-llmChain_0-input-prompt-BasePromptTemplate", - "data": { - "label": "" - } - }, - { - "source": "structuredOutputParser_0", - "sourceHandle": "structuredOutputParser_0-output-structuredOutputParser-StructuredOutputParser|BaseLLMOutputParser|Runnable", - "target": "llmChain_0", - "targetHandle": "llmChain_0-input-outputParser-BaseLLMOutputParser", - "type": "buttonedge", - "id": "structuredOutputParser_0-structuredOutputParser_0-output-structuredOutputParser-StructuredOutputParser|BaseLLMOutputParser|Runnable-llmChain_0-llmChain_0-input-outputParser-BaseLLMOutputParser", - "data": { - "label": "" - } - } - ] -} diff --git a/packages/server/marketplaces/chatflows/Tool Agent.json b/packages/server/marketplaces/chatflows/Tool Agent.json new file mode 100644 index 000000000..fb4400bdf --- /dev/null +++ b/packages/server/marketplaces/chatflows/Tool Agent.json @@ -0,0 +1,631 
@@ +{ + "description": "An agent designed to use tools and LLM with function calling capability to provide responses", + "usecases": ["Agent"], + "framework": ["Langchain"], + "nodes": [ + { + "width": 300, + "height": 149, + "id": "calculator_1", + "position": { + "x": 800.5125025564965, + "y": 72.40592063242738 + }, + "type": "customNode", + "data": { + "id": "calculator_1", + "label": "Calculator", + "version": 1, + "name": "calculator", + "type": "Calculator", + "baseClasses": ["Calculator", "Tool", "StructuredTool", "BaseLangChain"], + "category": "Tools", + "description": "Perform calculations on response", + "inputParams": [], + "inputAnchors": [], + "inputs": {}, + "outputAnchors": [ + { + "id": "calculator_1-output-calculator-Calculator|Tool|StructuredTool|BaseLangChain", + "name": "calculator", + "label": "Calculator", + "type": "Calculator | Tool | StructuredTool | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "positionAbsolute": { + "x": 800.5125025564965, + "y": 72.40592063242738 + }, + "selected": false, + "dragging": false + }, + { + "width": 300, + "height": 259, + "id": "bufferMemory_1", + "position": { + "x": 607.6260576768354, + "y": 584.7920541862369 + }, + "type": "customNode", + "data": { + "id": "bufferMemory_1", + "label": "Buffer Memory", + "version": 2, + "name": "bufferMemory", + "type": "BufferMemory", + "baseClasses": ["BufferMemory", "BaseChatMemory", "BaseMemory"], + "category": "Memory", + "description": "Retrieve chat messages stored in database", + "inputParams": [ + { + "label": "Session Id", + "name": "sessionId", + "type": "string", + "description": "If not specified, a random id will be used. 
Learn more", + "default": "", + "additionalParams": true, + "optional": true, + "id": "bufferMemory_1-input-sessionId-string" + }, + { + "label": "Memory Key", + "name": "memoryKey", + "type": "string", + "default": "chat_history", + "additionalParams": true, + "id": "bufferMemory_1-input-memoryKey-string" + } + ], + "inputAnchors": [], + "inputs": { + "sessionId": "", + "memoryKey": "chat_history" + }, + "outputAnchors": [ + { + "id": "bufferMemory_1-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory", + "name": "bufferMemory", + "label": "BufferMemory", + "type": "BufferMemory | BaseChatMemory | BaseMemory" + } + ], + "outputs": {}, + "selected": false + }, + "positionAbsolute": { + "x": 607.6260576768354, + "y": 584.7920541862369 + }, + "selected": false, + "dragging": false + }, + { + "width": 300, + "height": 282, + "id": "serpAPI_0", + "position": { + "x": 439.29908455642476, + "y": 48.06000078669291 + }, + "type": "customNode", + "data": { + "id": "serpAPI_0", + "label": "Serp API", + "version": 1, + "name": "serpAPI", + "type": "SerpAPI", + "baseClasses": ["SerpAPI", "Tool", "StructuredTool"], + "category": "Tools", + "description": "Wrapper around SerpAPI - a real-time API to access Google search results", + "inputParams": [ + { + "label": "Connect Credential", + "name": "credential", + "type": "credential", + "credentialNames": ["serpApi"], + "id": "serpAPI_0-input-credential-credential" + } + ], + "inputAnchors": [], + "inputs": {}, + "outputAnchors": [ + { + "id": "serpAPI_0-output-serpAPI-SerpAPI|Tool|StructuredTool", + "name": "serpAPI", + "label": "SerpAPI", + "type": "SerpAPI | Tool | StructuredTool" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 439.29908455642476, + "y": 48.06000078669291 + }, + "dragging": false + }, + { + "width": 300, + "height": 772, + "id": "chatOpenAI_0", + "position": { + "x": 97.01321406237057, + "y": 63.67664262280914 + }, + "type": "customNode", + "data": { 
+ "id": "chatOpenAI_0", + "label": "ChatOpenAI", + "version": 8.2, + "name": "chatOpenAI", + "type": "ChatOpenAI", + "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel", "Runnable"], + "category": "Chat Models", + "description": "Wrapper around OpenAI large language models that use the Chat endpoint", + "inputParams": [ + { + "label": "Connect Credential", + "name": "credential", + "type": "credential", + "credentialNames": ["openAIApi"], + "id": "chatOpenAI_0-input-credential-credential", + "display": true + }, + { + "label": "Model Name", + "name": "modelName", + "type": "asyncOptions", + "loadMethod": "listModels", + "default": "gpt-4o-mini", + "id": "chatOpenAI_0-input-modelName-asyncOptions", + "display": true + }, + { + "label": "Temperature", + "name": "temperature", + "type": "number", + "step": 0.1, + "default": 0.9, + "optional": true, + "id": "chatOpenAI_0-input-temperature-number", + "display": true + }, + { + "label": "Streaming", + "name": "streaming", + "type": "boolean", + "default": true, + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-streaming-boolean", + "display": true + }, + { + "label": "Max Tokens", + "name": "maxTokens", + "type": "number", + "step": 1, + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-maxTokens-number", + "display": true + }, + { + "label": "Top Probability", + "name": "topP", + "type": "number", + "step": 0.1, + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-topP-number", + "display": true + }, + { + "label": "Frequency Penalty", + "name": "frequencyPenalty", + "type": "number", + "step": 0.1, + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-frequencyPenalty-number", + "display": true + }, + { + "label": "Presence Penalty", + "name": "presencePenalty", + "type": "number", + "step": 0.1, + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-presencePenalty-number", + "display": 
true + }, + { + "label": "Timeout", + "name": "timeout", + "type": "number", + "step": 1, + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-timeout-number", + "display": true + }, + { + "label": "Strict Tool Calling", + "name": "strictToolCalling", + "type": "boolean", + "description": "Whether the model supports the `strict` argument when passing in tools. If not specified, the `strict` argument will not be passed to OpenAI.", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-strictToolCalling-boolean", + "display": true + }, + { + "label": "Stop Sequence", + "name": "stopSequence", + "type": "string", + "rows": 4, + "optional": true, + "description": "List of stop words to use when generating. Use comma to separate multiple stop words.", + "additionalParams": true, + "id": "chatOpenAI_0-input-stopSequence-string", + "display": true + }, + { + "label": "BasePath", + "name": "basepath", + "type": "string", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-basepath-string", + "display": true + }, + { + "label": "Proxy Url", + "name": "proxyUrl", + "type": "string", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-proxyUrl-string", + "display": true + }, + { + "label": "BaseOptions", + "name": "baseOptions", + "type": "json", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-baseOptions-json", + "display": true + }, + { + "label": "Allow Image Uploads", + "name": "allowImageUploads", + "type": "boolean", + "description": "Allow image input. 
Refer to the docs for more details.", + "default": false, + "optional": true, + "id": "chatOpenAI_0-input-allowImageUploads-boolean", + "display": true + }, + { + "label": "Image Resolution", + "description": "This parameter controls the resolution in which the model views the image.", + "name": "imageResolution", + "type": "options", + "options": [ + { + "label": "Low", + "name": "low" + }, + { + "label": "High", + "name": "high" + }, + { + "label": "Auto", + "name": "auto" + } + ], + "default": "low", + "optional": false, + "show": { + "allowImageUploads": true + }, + "id": "chatOpenAI_0-input-imageResolution-options", + "display": true + }, + { + "label": "Reasoning Effort", + "description": "Constrains effort on reasoning for reasoning models. Only applicable for o1 and o3 models.", + "name": "reasoningEffort", + "type": "options", + "options": [ + { + "label": "Low", + "name": "low" + }, + { + "label": "Medium", + "name": "medium" + }, + { + "label": "High", + "name": "high" + } + ], + "default": "medium", + "optional": false, + "additionalParams": true, + "id": "chatOpenAI_0-input-reasoningEffort-options", + "display": true + } + ], + "inputAnchors": [ + { + "label": "Cache", + "name": "cache", + "type": "BaseCache", + "optional": true, + "id": "chatOpenAI_0-input-cache-BaseCache", + "display": true + } + ], + "inputs": { + "cache": "", + "modelName": "gpt-4o-mini", + "temperature": 0.9, + "streaming": true, + "maxTokens": "", + "topP": "", + "frequencyPenalty": "", + "presencePenalty": "", + "timeout": "", + "strictToolCalling": "", + "stopSequence": "", + "basepath": "", + "proxyUrl": "", + "baseOptions": "", + "allowImageUploads": true, + "imageResolution": "low", + "reasoningEffort": "" + }, + "outputAnchors": [ + { + "id": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", + "name": "chatOpenAI", + "label": "ChatOpenAI", + "description": "Wrapper around OpenAI large language models that use the Chat endpoint", + "type": 
"ChatOpenAI | BaseChatModel | BaseLanguageModel | Runnable" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 97.01321406237057, + "y": 63.67664262280914 + }, + "dragging": false + }, + { + "id": "stickyNote_0", + "position": { + "x": 1197.3578961103253, + "y": 117.43214592301385 + }, + "type": "stickyNote", + "data": { + "id": "stickyNote_0", + "label": "Sticky Note", + "version": 2, + "name": "stickyNote", + "type": "StickyNote", + "baseClasses": ["StickyNote"], + "tags": ["Utilities"], + "category": "Utilities", + "description": "Add a sticky note", + "inputParams": [ + { + "label": "", + "name": "note", + "type": "string", + "rows": 1, + "placeholder": "Type something here", + "optional": true, + "id": "stickyNote_0-input-note-string" + } + ], + "inputAnchors": [], + "inputs": { + "note": "LLM has to be function calling compatible" + }, + "outputAnchors": [ + { + "id": "stickyNote_0-output-stickyNote-StickyNote", + "name": "stickyNote", + "label": "StickyNote", + "description": "Add a sticky note", + "type": "StickyNote" + } + ], + "outputs": {}, + "selected": false + }, + "width": 300, + "height": 62, + "selected": false, + "positionAbsolute": { + "x": 1197.3578961103253, + "y": 117.43214592301385 + }, + "dragging": false + }, + { + "id": "toolAgent_0", + "position": { + "x": 1200.6756893536506, + "y": 208.18578883272318 + }, + "type": "customNode", + "data": { + "id": "toolAgent_0", + "label": "Tool Agent", + "version": 2, + "name": "toolAgent", + "type": "AgentExecutor", + "baseClasses": ["AgentExecutor", "BaseChain", "Runnable"], + "category": "Agents", + "description": "Agent that uses Function Calling to pick the tools and args to call", + "inputParams": [ + { + "label": "System Message", + "name": "systemMessage", + "type": "string", + "default": "You are a helpful AI assistant.", + "description": "If Chat Prompt Template is provided, this will be ignored", + "rows": 4, + "optional": true, + 
"additionalParams": true, + "id": "toolAgent_0-input-systemMessage-string", + "display": true + }, + { + "label": "Max Iterations", + "name": "maxIterations", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "toolAgent_0-input-maxIterations-number", + "display": true + }, + { + "label": "Enable Detailed Streaming", + "name": "enableDetailedStreaming", + "type": "boolean", + "default": false, + "description": "Stream detailed intermediate steps during agent execution", + "optional": true, + "additionalParams": true, + "id": "toolAgent_0-input-enableDetailedStreaming-boolean", + "display": true + } + ], + "inputAnchors": [ + { + "label": "Tools", + "name": "tools", + "type": "Tool", + "list": true, + "id": "toolAgent_0-input-tools-Tool", + "display": true + }, + { + "label": "Memory", + "name": "memory", + "type": "BaseChatMemory", + "id": "toolAgent_0-input-memory-BaseChatMemory", + "display": true + }, + { + "label": "Tool Calling Chat Model", + "name": "model", + "type": "BaseChatModel", + "description": "Only compatible with models that are capable of function calling: ChatOpenAI, ChatMistral, ChatAnthropic, ChatGoogleGenerativeAI, ChatVertexAI, GroqChat", + "id": "toolAgent_0-input-model-BaseChatModel", + "display": true + }, + { + "label": "Chat Prompt Template", + "name": "chatPromptTemplate", + "type": "ChatPromptTemplate", + "description": "Override existing prompt with Chat Prompt Template. 
Human Message must includes {input} variable", + "optional": true, + "id": "toolAgent_0-input-chatPromptTemplate-ChatPromptTemplate", + "display": true + }, + { + "label": "Input Moderation", + "description": "Detect text that could generate harmful output and prevent it from being sent to the language model", + "name": "inputModeration", + "type": "Moderation", + "optional": true, + "list": true, + "id": "toolAgent_0-input-inputModeration-Moderation", + "display": true + } + ], + "inputs": { + "tools": ["{{calculator_1.data.instance}}", "{{serpAPI_0.data.instance}}"], + "memory": "{{bufferMemory_1.data.instance}}", + "model": "{{chatOpenAI_0.data.instance}}", + "chatPromptTemplate": "", + "systemMessage": "You are a helpful AI assistant.", + "inputModeration": "", + "maxIterations": "", + "enableDetailedStreaming": "" + }, + "outputAnchors": [ + { + "id": "toolAgent_0-output-toolAgent-AgentExecutor|BaseChain|Runnable", + "name": "toolAgent", + "label": "AgentExecutor", + "description": "Agent that uses Function Calling to pick the tools and args to call", + "type": "AgentExecutor | BaseChain | Runnable" + } + ], + "outputs": {}, + "selected": false + }, + "width": 300, + "height": 492, + "selected": false, + "positionAbsolute": { + "x": 1200.6756893536506, + "y": 208.18578883272318 + }, + "dragging": false + } + ], + "edges": [ + { + "source": "calculator_1", + "sourceHandle": "calculator_1-output-calculator-Calculator|Tool|StructuredTool|BaseLangChain", + "target": "toolAgent_0", + "targetHandle": "toolAgent_0-input-tools-Tool", + "type": "buttonedge", + "id": "calculator_1-calculator_1-output-calculator-Calculator|Tool|StructuredTool|BaseLangChain-toolAgent_0-toolAgent_0-input-tools-Tool" + }, + { + "source": "serpAPI_0", + "sourceHandle": "serpAPI_0-output-serpAPI-SerpAPI|Tool|StructuredTool", + "target": "toolAgent_0", + "targetHandle": "toolAgent_0-input-tools-Tool", + "type": "buttonedge", + "id": 
"serpAPI_0-serpAPI_0-output-serpAPI-SerpAPI|Tool|StructuredTool-toolAgent_0-toolAgent_0-input-tools-Tool" + }, + { + "source": "chatOpenAI_0", + "sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", + "target": "toolAgent_0", + "targetHandle": "toolAgent_0-input-model-BaseChatModel", + "type": "buttonedge", + "id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable-toolAgent_0-toolAgent_0-input-model-BaseChatModel" + }, + { + "source": "bufferMemory_1", + "sourceHandle": "bufferMemory_1-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory", + "target": "toolAgent_0", + "targetHandle": "toolAgent_0-input-memory-BaseChatMemory", + "type": "buttonedge", + "id": "bufferMemory_1-bufferMemory_1-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory-toolAgent_0-toolAgent_0-input-memory-BaseChatMemory" + } + ] +} diff --git a/packages/server/marketplaces/chatflows/Transcript Summarization.json b/packages/server/marketplaces/chatflows/Transcript Summarization.json deleted file mode 100644 index b8e8fd25a..000000000 --- a/packages/server/marketplaces/chatflows/Transcript Summarization.json +++ /dev/null @@ -1,512 +0,0 @@ -{ - "description": "Use Anthropic Claude with 200k context window to ingest whole document for summarization", - "framework": ["Langchain"], - "usecases": ["Summarization"], - "nodes": [ - { - "width": 300, - "height": 253, - "id": "bufferMemory_0", - "position": { - "x": 240.5161028076149, - "y": 165.35849026339048 - }, - "type": "customNode", - "data": { - "id": "bufferMemory_0", - "label": "Buffer Memory", - "version": 2, - "name": "bufferMemory", - "type": "BufferMemory", - "baseClasses": ["BufferMemory", "BaseChatMemory", "BaseMemory"], - "category": "Memory", - "description": "Retrieve chat messages stored in database", - "inputParams": [ - { - "label": "Session Id", - "name": "sessionId", - "type": "string", - "description": "If not specified, a 
random id will be used. Learn more", - "default": "", - "additionalParams": true, - "optional": true, - "id": "bufferMemory_0-input-sessionId-string" - }, - { - "label": "Memory Key", - "name": "memoryKey", - "type": "string", - "default": "chat_history", - "additionalParams": true, - "id": "bufferMemory_0-input-memoryKey-string" - } - ], - "inputAnchors": [], - "inputs": { - "sessionId": "", - "memoryKey": "chat_history" - }, - "outputAnchors": [ - { - "id": "bufferMemory_0-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory", - "name": "bufferMemory", - "label": "BufferMemory", - "type": "BufferMemory | BaseChatMemory | BaseMemory" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 240.5161028076149, - "y": 165.35849026339048 - }, - "dragging": false - }, - { - "width": 300, - "height": 435, - "id": "conversationChain_0", - "position": { - "x": 958.9887390513221, - "y": 318.8734467468765 - }, - "type": "customNode", - "data": { - "id": "conversationChain_0", - "label": "Conversation Chain", - "version": 3, - "name": "conversationChain", - "type": "ConversationChain", - "baseClasses": ["ConversationChain", "LLMChain", "BaseChain", "Runnable"], - "category": "Chains", - "description": "Chat models specific conversational chain with memory", - "inputParams": [ - { - "label": "System Message", - "name": "systemMessagePrompt", - "type": "string", - "rows": 4, - "description": "If Chat Prompt Template is provided, this will be ignored", - "additionalParams": true, - "optional": true, - "default": "The following is a friendly conversation between a human and an AI. The AI is talkative and provides lots of specific details from its context. If the AI does not know the answer to a question, it truthfully says it does not know.", - "placeholder": "The following is a friendly conversation between a human and an AI. The AI is talkative and provides lots of specific details from its context. 
If the AI does not know the answer to a question, it truthfully says it does not know.", - "id": "conversationChain_0-input-systemMessagePrompt-string" - } - ], - "inputAnchors": [ - { - "label": "Chat Model", - "name": "model", - "type": "BaseChatModel", - "id": "conversationChain_0-input-model-BaseChatModel" - }, - { - "label": "Memory", - "name": "memory", - "type": "BaseMemory", - "id": "conversationChain_0-input-memory-BaseMemory" - }, - { - "label": "Chat Prompt Template", - "name": "chatPromptTemplate", - "type": "ChatPromptTemplate", - "description": "Override existing prompt with Chat Prompt Template. Human Message must includes {input} variable", - "optional": true, - "id": "conversationChain_0-input-chatPromptTemplate-ChatPromptTemplate" - }, - { - "label": "Input Moderation", - "description": "Detect text that could generate harmful output and prevent it from being sent to the language model", - "name": "inputModeration", - "type": "Moderation", - "optional": true, - "list": true, - "id": "conversationChain_0-input-inputModeration-Moderation" - } - ], - "inputs": { - "inputModeration": "", - "model": "{{chatAnthropic_0.data.instance}}", - "memory": "{{bufferMemory_0.data.instance}}", - "chatPromptTemplate": "{{chatPromptTemplate_0.data.instance}}", - "systemMessagePrompt": "The following is a friendly conversation between a human and an AI. The AI is talkative and provides lots of specific details from its context. If the AI does not know the answer to a question, it truthfully says it does not know." 
- }, - "outputAnchors": [ - { - "id": "conversationChain_0-output-conversationChain-ConversationChain|LLMChain|BaseChain|Runnable", - "name": "conversationChain", - "label": "ConversationChain", - "type": "ConversationChain | LLMChain | BaseChain | Runnable" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 958.9887390513221, - "y": 318.8734467468765 - }, - "dragging": false - }, - { - "width": 300, - "height": 670, - "id": "chatAnthropic_0", - "position": { - "x": 585.3308245972187, - "y": -116.32789506560908 - }, - "type": "customNode", - "data": { - "id": "chatAnthropic_0", - "label": "ChatAnthropic", - "version": 6, - "name": "chatAnthropic", - "type": "ChatAnthropic", - "baseClasses": ["ChatAnthropic", "BaseChatModel", "BaseLanguageModel", "Runnable"], - "category": "Chat Models", - "description": "Wrapper around ChatAnthropic large language models that use the Chat endpoint", - "inputParams": [ - { - "label": "Connect Credential", - "name": "credential", - "type": "credential", - "credentialNames": ["anthropicApi"], - "id": "chatAnthropic_0-input-credential-credential" - }, - { - "label": "Model Name", - "name": "modelName", - "type": "asyncOptions", - "loadMethod": "listModels", - "default": "claude-3-haiku", - "id": "chatAnthropic_0-input-modelName-options" - }, - { - "label": "Temperature", - "name": "temperature", - "type": "number", - "step": 0.1, - "default": 0.9, - "optional": true, - "id": "chatAnthropic_0-input-temperature-number" - }, - { - "label": "Max Tokens", - "name": "maxTokensToSample", - "type": "number", - "step": 1, - "optional": true, - "additionalParams": true, - "id": "chatAnthropic_0-input-maxTokensToSample-number" - }, - { - "label": "Top P", - "name": "topP", - "type": "number", - "step": 0.1, - "optional": true, - "additionalParams": true, - "id": "chatAnthropic_0-input-topP-number" - }, - { - "label": "Top K", - "name": "topK", - "type": "number", - "step": 0.1, - "optional": 
true, - "additionalParams": true, - "id": "chatAnthropic_0-input-topK-number" - }, - { - "label": "Allow Image Uploads", - "name": "allowImageUploads", - "type": "boolean", - "description": "Automatically uses claude-3-* models when image is being uploaded from chat. Only works with LLMChain, Conversation Chain, ReAct Agent, and Conversational Agent", - "default": false, - "optional": true, - "id": "chatAnthropic_0-input-allowImageUploads-boolean" - } - ], - "inputAnchors": [ - { - "label": "Cache", - "name": "cache", - "type": "BaseCache", - "optional": true, - "id": "chatAnthropic_0-input-cache-BaseCache" - } - ], - "inputs": { - "cache": "", - "modelName": "claude-3-haiku-20240307", - "temperature": 0.9, - "maxTokensToSample": "", - "topP": "", - "topK": "", - "allowImageUploads": true - }, - "outputAnchors": [ - { - "id": "chatAnthropic_0-output-chatAnthropic-ChatAnthropic|BaseChatModel|BaseLanguageModel|Runnable", - "name": "chatAnthropic", - "label": "ChatAnthropic", - "type": "ChatAnthropic | BaseChatModel | BaseLanguageModel | Runnable" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 585.3308245972187, - "y": -116.32789506560908 - }, - "dragging": false - }, - { - "width": 300, - "height": 690, - "id": "chatPromptTemplate_0", - "position": { - "x": -106.44189698270114, - "y": 20.133956087516538 - }, - "type": "customNode", - "data": { - "id": "chatPromptTemplate_0", - "label": "Chat Prompt Template", - "version": 1, - "name": "chatPromptTemplate", - "type": "ChatPromptTemplate", - "baseClasses": ["ChatPromptTemplate", "BaseChatPromptTemplate", "BasePromptTemplate", "Runnable"], - "category": "Prompts", - "description": "Schema to represent a chat prompt", - "inputParams": [ - { - "label": "System Message", - "name": "systemMessagePrompt", - "type": "string", - "rows": 4, - "placeholder": "You are a helpful assistant that translates {input_language} to {output_language}.", - "id": 
"chatPromptTemplate_0-input-systemMessagePrompt-string" - }, - { - "label": "Human Message", - "name": "humanMessagePrompt", - "type": "string", - "rows": 4, - "placeholder": "{text}", - "id": "chatPromptTemplate_0-input-humanMessagePrompt-string" - }, - { - "label": "Format Prompt Values", - "name": "promptValues", - "type": "json", - "optional": true, - "acceptVariable": true, - "list": true, - "id": "chatPromptTemplate_0-input-promptValues-json" - } - ], - "inputAnchors": [], - "inputs": { - "systemMessagePrompt": "Act as an expert copywriter specializing in content optimization for SEO. Your task is to take a given YouTube transcript and transform it into a well-structured and engaging article. Your objectives are as follows:\n\nContent Transformation: Begin by thoroughly reading the provided YouTube transcript. Understand the main ideas, key points, and the overall message conveyed.\n\nSentence Structure: While rephrasing the content, pay careful attention to sentence structure. Ensure that the article flows logically and coherently.\n\nKeyword Identification: Identify the main keyword or phrase from the transcript. It's crucial to determine the primary topic that the YouTube video discusses.\n\nKeyword Integration: Incorporate the identified keyword naturally throughout the article. Use it in headings, subheadings, and within the body text. However, avoid overuse or keyword stuffing, as this can negatively affect SEO.\n\nUnique Content: Your goal is to make the article 100% unique. Avoid copying sentences directly from the transcript. Rewrite the content in your own words while retaining the original message and meaning.\n\nSEO Friendliness: Craft the article with SEO best practices in mind. This includes optimizing meta tags (title and meta description), using header tags appropriately, and maintaining an appropriate keyword density.\n\nEngaging and Informative: Ensure that the article is engaging and informative for the reader. 
It should provide value and insight on the topic discussed in the YouTube video.\n\nProofreading: Proofread the article for grammar, spelling, and punctuation errors. Ensure it is free of any mistakes that could detract from its quality.\n\nBy following these guidelines, create a well-optimized, unique, and informative article that would rank well in search engine results and engage readers effectively.\n\nTranscript:{transcript}", - "humanMessagePrompt": "{input}", - "promptValues": "{\"input\":\"{{question}}\",\"transcript\":\"{{plainText_0.data.instance}}\"}" - }, - "outputAnchors": [ - { - "id": "chatPromptTemplate_0-output-chatPromptTemplate-ChatPromptTemplate|BaseChatPromptTemplate|BasePromptTemplate|Runnable", - "name": "chatPromptTemplate", - "label": "ChatPromptTemplate", - "type": "ChatPromptTemplate | BaseChatPromptTemplate | BasePromptTemplate | Runnable" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": -106.44189698270114, - "y": 20.133956087516538 - }, - "dragging": false - }, - { - "width": 300, - "height": 487, - "id": "plainText_0", - "position": { - "x": -487.7511991135089, - "y": 77.83838996645807 - }, - "type": "customNode", - "data": { - "id": "plainText_0", - "label": "Plain Text", - "version": 2, - "name": "plainText", - "type": "Document", - "baseClasses": ["Document"], - "category": "Document Loaders", - "description": "Load data from plain text", - "inputParams": [ - { - "label": "Text", - "name": "text", - "type": "string", - "rows": 4, - "placeholder": "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua...", - "id": "plainText_0-input-text-string" - }, - { - "label": "Metadata", - "name": "metadata", - "type": "json", - "optional": true, - "additionalParams": true, - "id": "plainText_0-input-metadata-json" - } - ], - "inputAnchors": [ - { - "label": "Text Splitter", - "name": "textSplitter", - "type": 
"TextSplitter", - "optional": true, - "id": "plainText_0-input-textSplitter-TextSplitter" - } - ], - "inputs": { - "text": "\n\n00:00 - It starts by showing [this question](https://stackoverflow.com/questions/32191198/i-would-like-to-split-this-into-a-list-but-i-dont-know-how-in-python) and reading it out loud\n\n0:00 - \"I have this string. I want to split it on the pipe, but I don't know how. I don't want to split it at the white-space, only at the pipe. Is this possible?\"\n\n0:10 - The comments below the question are presented and read out loud:\n\n> Did you try Googling _anything_ about the `split()` method?\n\n> Welcome to Stack Overflow! You seem to be asking for someone to write some code for you. Stack Overflow is a question and answer site, not a code-writing service. Please see here to learn how to write effective questions.\n\n0:23 - \"Whatever happened to Stack Overflow? How did we end up in a place where we not only get rude and snarky comments but we also get robotic-like responses from real people from valid questions?\"\n\n0:36 - \"This is not a rare scenario. This has become the norm on Stack Overflow. And I just have one question. How did we end up here?\"\n\n0:46 - \"My name is Gabe. And today we're going to look at some of the key factors that resulted in the cesspool that is Stack Overflow Q&A today. I'll also explore some approaches that may solve the problems we face, so that developers can just share information without fear of reproach.\" \n\n(In big letters, \"The Egotistical Mods of Stack Overflow\")\n\n1:08 - Title: The Stack Overflow Age\n\n1:12 - \"Let's go back in time to 2008. NASA's unmanned spacecraft Phoenix becomes the first to land on the northern polar region of Mars, Google Chrome is first released, the stock market plunges across the globe, and a little-known Stack Overflow comes into existence. Stack Overflow was created by Joel Spolsky and Jeff Atwood to address an issue that was apparent in the mid to late 2000s. 
The issue was that programmers had no way to easily share knowledge about difficult problems. There was Usenet which became obsolete once the World Wide Web became popular, then there was Experts Exchange where developers could share questions and answers. This had a myriad of problems the foremost being it was a paid serve. Joel wanted a free website that could replace Experts Exchange and earn money while doubling as a job listing board. He know somebody would do it eventually, so he waited.... and waited... and he waited... then one day, Jeff Atwood came to him for advice on blogging and Joel simply responded \"I've got a better idea instead\".\"\n\n2:12 - \"The Stack Overflow experiment went extraordinarily well. It was based on a gamification system similar to Reddit and other sites where users could upvote good questions and good answers. Once you got a certain amount of reputation you were granted more privileges to the site. These privileges included editing questions, closing questions and reopening questions. This means that poor questions are dealt with quickly and good answers rank higher. This worked great for moderation in the short term. However, it led to the problems we see today.\n\n2:28 - Title: \"The Problems\"\n\n2:42 - \"OK. So let's review real quickly how do users gain privilege? Well, they gain privilege by answering questions. What can you do with the privilege? You can answer questions, you can close questions, you can edit questions and you can reopen questions. OK. Well all that makes sense, well, what happens if you ask and somebody decides to close it? Well, you could reopen it but wait a second... you need the privilege to reopen it, and the privilege you don't have because... you're a new user.\"\n\n3:10 - \"OK well, what if somebody is bullying you in the comments? Well, you flag the comment OK, and then the flagged comment goes to... 
the moderators.\"\n\n3:19 - \"So let's just take a look at some questions on Stack Overflow right now. There's this user who asks:\"\n\n3:24 - A question is show, content verbatim save for the link, redacted in this transcript:\n\n> ### What technology was this website been built with?\n>\n> I apologise in advance if this question is not allowed here.\n>\n> Do you know what technology has been used to build this website?\n>\n> [REDACTED]\n> \n> Viewing the dev console and source has offered me no clues!\n\n(asked Nov 17 '16 at 18:34)\n\n3:31 - \"Now, I think this is a valid question. Programmers build websites using so many different technologies, it's reasonable to ask if anyone knows what a particular site used to build it. Well, the Stack Overflow users, they have a different thought. The first one says:\"\n\n> Seems like you already knew questions like this are off-topic. -- Nov 17 '16 at 18:36\n\n3:49 - \"And then this person says:\"\n\n> I see quite a bit of information in the dev. tools. -- Nov 17 '16 at 18:36\n\n\"The implication being, 'why can _you_ see what the devtools show _me_?'\nThis next person says:\"\n\n> Really? The console offered nothing? To me, it yelled out pixijs.com... -- Nov 17 '16 at 18:37\n\n4:06 - \"and this user gave a very helpful answer, that it's:\"\n\n> Magic, naturally. -- Nov 17 '16 at 18:38\n\n\"And then finally somebody just says:\"\n\n> I confirm it's pixijs -- Nov 17 '16 at 18:39\n\n\"It's pixijs, that's what they used. And the poor user simply says thanks and then leaves with their tail between their legs.\"\n\n> My apologies :( Thanks for the link -- Nov 17 '16 at 18:39\n\n4:21 - \"What's wrong with these people? Why is it so difficult to just say it's pixijs?\"\n\n4:27 - \"All right so the next question is pretty technical, but an experienced programmer should be able to help out. So this person asked:\"\n\n> ### Initialization of a constant reference with a number\n>\n> What is the meaning of the following line? 
Why is this allowed as 0 is an r-value and not a variable name? What is the significance of `const` in this statement?\n>\n> ```c++\n> const int &x = 0;\n> ```\n\n(asked May 15 '18 at 21:35) https://stackoverflow.com/q/50359481\n\n(Note: as presented in the video, the question was not closed and had a score of -10)\n\n4:41 - \"Aand the responses, so... first person says:\"\n\n> Homework? And what does your C++ textbook have to say on the subject? -- May 15 '18 at 21:36 (now deleted)\n\n\"And the next person, very helpfully, says:\"\n\n> Just read a [good book]. -- May 15 18' at 21:38 (now deleted)\n\n\"And then, 'here, this link has a similar question', which may or may not have been helpful at all\":\n\n> Similar: -- May 15 18' at 21:39\n\n4:56 - \"Now, I was actually curious about what this question... was, like what the answer to this was, because I didn't even know what it was. I code in C++ pretty regularly and did not know the meaning of this, I'd never seen this syntax, and didn't even know it was valid code. And this poor guy was downvoted 10 times, why? Because he had the audacity to ask a question. OK? OK.\n\n5:20 - \"So, he finally finds an answer and then he posts it to the website. Uh, the answer gets downvoted, so if anybody else is looking for something similar, they can't find it. 
And I was actually curious about this, like I had never seen this before, and yet if I ever were searching for it I probably wouldn't find it because Google is now gonna rank this low, since it was downvoted.\n\n5:39 - \"This next user asks:\"\n\n> ### What is e in e.preventDefault()\n>\n> I am not able to understand the parameter 'e' which is passed to prevent the Default Action in JavaScript\n> \n> ```javascript\n> document.getElementById('submit').addEventListener('click', calculate, false);\n> function calculate(e){\n> e.preventDefault():\n> }\n> ```\n\n(asked Sep 14 '17 at 9:57) https://stackoverflow.com/q/46216042\n\n5:49 - \"Now, to an experienced programmer this is pretty obvious. However, if you don't program and you've never seen this or you're new to programming, this is a completely valid question. The responses:\"\n\n> e is the event. -- Sep 14 '17 at 9:58\n\n6:02 - \"Well, that's actually a pretty tame response but it provides absolutely no information.\"\n\n(more unmentioned comments are displayed, as seen below)\n\n> e represents the event which has a lot of properties. -- Sep 14 '17 at 9:58\n\n> You can read about the [click event here] -- Sep 14 '17 at 10:02\n\n> When the JavaScript engine calls the callback you provided, it passes an Event object. You gain access to that passed object by giving the function a parameter. You don't have to call it e; you can use any valid variable name you want. Your confusion probably arises from the fact that you provide a function called by JS, instead of the other way around. -- Sep 14 '17 at 10:03\n\n6:07 - \"Then this lovely gentleman says:\"\n\n> \"This question shows zero research effort. 
Aside from the fact you get that answer by literally typing your title into google, did you try anything, like `console.log(e)` on different element bindings to see what it might be?\" -- Sep 14 '17 at 10:04 (now deleted)\n\n6:22 - \"What is wrong with these people, they seem to have forgotten that Stack Overflow is a Q&A site! How dare this user ask a question! On a Q&A site!\"\n\n6:31 - \"This user says:\"\n\n> I am learning coding c++ in Unreal Engine, I got this syntax I don't really get:\n> \n> ```c++\n> class USphereComponent* ProxSphere;\n> ```\n>\n> I think this means create a class, but this class is a pointer?\n> \n> But the result is just create an object called ProxSphere from an existing class USphereComponent.\n> \n> Could anyone explain how this syntax actually means and it's usage?\n\n(asked Feb 6 '16 at 17:51) https://stackoverflow.com/questions/35244342\n\n(At the time of writing, the question had received a score inflation also caused by the extra attention from the video (+13, -9))\n\n6:42 - \"The responses:\"\n\n> Please pick up a text book and learn C++ systematically. -- Feb 6 '16 at 17:52 (now deleted)\n\n\"Um, I'm sorry? This is a Q&A site.\"\n\n> This is a class pointer declaration, no more, no less. -- Feb 6 '16 at 17:52\n\n6:53 - \"Ah, that makes it perfectly clear, how did I not see that before. Just in case you're wondering, that is sarcasm, that is not clear at all! Once again, this is an example of a problem I've never seen before, and I program in C++ almost daily. This user thankfully provided a clear and concise explanation, but why did all those other users feel the need to waste time out of their day to berate someone who had the audacity to ask a question?\"\n\n7:23 - \"This next question's answer gives us a little window into the moderator's brains. Warning, it's a scary place. 
This users asks:\"\n\n> ### How to answer a closed question?\n>\n> This [question](https://stackoverflow.com/questions/61054657/input-twice-to-pass-the-condition?noredirect=1#comment108031699_61054657) was closed yesterday for obvious reasons. One important function in question which answers were really depending on that wasn't in question. Then after the question was closed, OP left a comment that they had added the function which makes the question very clear.\n> \n> How can I answer this question? Should I create a chat room?\n\n(asked April 7 '20 at 13:21) Meta https://meta.stackoverflow.com/questions/396379\n\n 7:43 - \"Now, this is the crux of the problem with Stack Overflow. Closed questions are sort of left in a limbo state. They're closed so they can't be answered. You have to edit it to be able to answer it, but we've already talked about the problems that come with editing it and trying to get your question reopened. Hint hint, it requires reputation, which most people don't have.\"\n\n8:02 - \"Fortunately, a moderator gives us an answer to this question. He says:\"\n\n> ### Edit the Question to include the comment, and then vote to reopen it.\n\n\"Oh, OK so, uh, you just edit it and then you have to vote to reopen it, so even if the question is fixed, you can't reopen it at all.\"\n\n8:18 - \"And then he puts in bold:\"\n\n> **Do not open a chat room or answer in comments or otherwise work around the closing.**\n\n\"'Cuz... how dare somebody try to help some other random person on the internet.\"\n\n8:29 - Title: \"Closed Questions\"\n\n8:29 - \"OK let's change it up a bit, let's look at some questions that were closed, but lots of people disagreed with that closing.\"\n\n8:36 - \"This question says:\"\n\n> ### The Use of Multiple JFrames: Good or Bad Practice?\n>\n> I'm developing an application which displays images, and plays sounds from a database. 
I'm trying to decide whether or not to use a separate JFrame to add images to the database from the GUI.\n>\n> I'm just wondering whether it is good practice to use multiple JFrame windows?\n\n(asked Mar 4 '12 at 11:53) https://stackoverflow.com/questions/9554636\n\n\"Well, why was this closed? Because... it is opinion based.\"\n\n8:46 - \"Stack Overflow moderators, they hate opinions, OK? And why do they hate opinions? Well... nobody really knows, heheh... and opinions are kinda tricky because you can't tell if a question's subjective or objective, there's kind of a blurry line between those two. And who gets to decide whether it's subjective of objective? Well, the moderators.\"\n\n9:10 - \"And in the responses we can see people say this question has become more valuable than they ever thought it could. Well, I guess the mods just got this one wrong.\"\n\n9:18 - \"Oops! It looks like they got this one wrong too. This question has 557 upvotes! That's a lot on Stack Overflow, where questions typically get only 5 or 6 upvotes. Why was it closed? Well, uh, we don't know. It was closed because a mod decided it needed to be.\"\n\n> ### Does anyone have benchmarks (code & results) comparing performance of Android apps written in Xamarin C# and Java?\n\n(asked Mar 4 '12 at 11:53) https://stackoverflow.com/questions/17134522\n\n9:34 - \"Surely this was just another one-off mistake, right? Wait. The mods messed up again? \"\n\n> ### Seeking useful Eclipse Java code templates \n\n(asked Jun 22 '09 at 19:00) https://stackoverflow.com/questions/1028858\n\n\"This question was closed because... it... wasn't focused enough. Well, it got 518 upvotes, so clearly some people, at least half a thousand of them, though it was focused enough.\"\n\n9:52 - \"I'm beginning to see a pattern here. 371 upvotes? Well, it's closed. Why? 
Because it's an opinion.\"\n\n> ### What is the best way to implement constants in Java?\n\n(asked Sep 15 '08 at 19:39, history locked) https://stackoverflow.com/q/66066\n\n9:56 - \"Another opinion? How dare these programmers ask an opinionated question.\"\n\n> ### Best XML parser for Java?\n\n(asked Dec 17 '08 at 6:52) https://stackoverflow.com/questions/373833\n\n10:01 - \"Finally someone was fed up enough and said:\n\n> +220 and not constructive. Clearly moderators and users have different perspectives on what is constructive. -- Jun 9 '14 at 6:40\n\n10:10 - \"I agree, random user. I completely, wholeheartedly agree.\"\n\n10:15 - \"Another opinion! Man, these stupid programmers can't stop asking subjective questions, can they!?\"\n\n> ### C++ code file extension? What is the difference between .cc and .cpp?\n\n(asked Oct 9 '09 at 17:23) https://stackoverflow.com/questions/1545080\n\n10:20 - \"These moderators, pff... they have such a difficult life.\"\n\n10:23 - \"This question was closed because, well, we actually don't know why it was closed. Eh eh.\"\n\n> ### Why have header files and .cpp files?\n\n(asked Dec 2 '08 at 13:17) https://stackoverflow.com/q/333889\n\n10:28 - \"Unfocused question. Closed! Nice. Moving on...\"\n\n> ### Calling C/C++ from Python?\n\n(asked Sep 28 '08 at 5:34) https://stackoverflow.com/questions/145270\n\n10:32 - \"More opinions. Moving on...\"\n\n> ### Case-insensitive string comparison in C++\n\n(asked Aug 14 '08 at 20:01) https://stackoverflow.com/q/11635\n\n10:34 - \"You know... I just don't like this guy's name. Let's close this one and move on.\"\n\n> ### Do you (really) write exception safe code?\n\n(asked Dec 5 '09 at 19:48) https://stackoverflow.com/questions/1853243\n\n10:38 - \"Another opinion! Oh wait, 3000 people actually wanted to know the answer to this one.\"\n\n> ### What is the best way to iterate over a dictionary?\n\n(not found via search, must have been deleted)\n\n10:42 - \"Maybe... 
I'm not such a good moderator. Nah, hah, these people just don't know an opinion when it smacks them in the face.\"\n\n10:54 - Title: \"Conclusion\"\n\n10:59 - \"Unfortunately I have so many more examples of this. If you want to see examples of this just click the link in the description and you can see up to 500 more questions that are just like the ones I just showed you.\"\n\nThe link: https://stackoverflow.com/search?q=closed%3A1+duplicate%3A0\n\n11:10 - \"Even though there are only 500 questions there, there's probably thousands of questions that will never see the light of day because of the rigged system that we already talked about that is in place.\"\n\n11:23 - \"Stack Overflow has a problem. I wish it didn't, because it's helped me and so many other programmers over the past decade. However, I think it's reaching its lifetime. It's going to remain a valuable resource for decades to come, but it's no longer gaining value. If you ask programmers who have asked questions on Stack Overflow, I bet you they got their own horror stories to tell. Not only that, but they'll talk about how they now go to Discord servers or Reddit, Quora, or anywhere else except Stack Overflow because, nobody likes to be berated for absolutely no reason. Maybe Stack Overflow will notice this problem. The real problem. And fix this.\"\n\n12:00 - \"Anyways, that is all I have for today. 
I hope you learned a little bit about the cesspool that is Stack Overflow.\"", - "textSplitter": "", - "metadata": "" - }, - "outputAnchors": [ - { - "name": "output", - "label": "Output", - "type": "options", - "options": [ - { - "id": "plainText_0-output-document-Document|json", - "name": "document", - "label": "Document", - "type": "Document | json" - }, - { - "id": "plainText_0-output-text-string|json", - "name": "text", - "label": "Text", - "type": "string | json" - } - ], - "default": "document" - } - ], - "outputs": { - "output": "text" - }, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": -487.7511991135089, - "y": 77.83838996645807 - }, - "dragging": false - }, - { - "id": "stickyNote_0", - "position": { - "x": 956.2443072079327, - "y": 19.62362357631281 - }, - "type": "stickyNote", - "data": { - "id": "stickyNote_0", - "label": "Sticky Note", - "version": 2, - "name": "stickyNote", - "type": "StickyNote", - "baseClasses": ["StickyNote"], - "tags": ["Utilities"], - "category": "Utilities", - "description": "Add a sticky note", - "inputParams": [ - { - "label": "", - "name": "note", - "type": "string", - "rows": 1, - "placeholder": "Type something here", - "optional": true, - "id": "stickyNote_0-input-note-string" - } - ], - "inputAnchors": [], - "inputs": { - "note": "With large context size LLM like Anthropic and Gemini, we can shovel whole content into LLM without breaking into chunks.\n\nThis is useful when you need to do summarization or translation word by word without losing any context.\n\nIn this example, we give a piece of Youtube transcript and a prompt for summarization.\n\nExample question:\nCan you summarize the key points?" 
- }, - "outputAnchors": [ - { - "id": "stickyNote_0-output-stickyNote-StickyNote", - "name": "stickyNote", - "label": "StickyNote", - "description": "Add a sticky note", - "type": "StickyNote" - } - ], - "outputs": {}, - "selected": false - }, - "width": 300, - "height": 284, - "selected": false, - "positionAbsolute": { - "x": 956.2443072079327, - "y": 19.62362357631281 - }, - "dragging": false - } - ], - "edges": [ - { - "source": "bufferMemory_0", - "sourceHandle": "bufferMemory_0-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory", - "target": "conversationChain_0", - "targetHandle": "conversationChain_0-input-memory-BaseMemory", - "type": "buttonedge", - "id": "bufferMemory_0-bufferMemory_0-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory-conversationChain_0-conversationChain_0-input-memory-BaseMemory" - }, - { - "source": "chatAnthropic_0", - "sourceHandle": "chatAnthropic_0-output-chatAnthropic-ChatAnthropic|BaseChatModel|BaseLanguageModel|Runnable", - "target": "conversationChain_0", - "targetHandle": "conversationChain_0-input-model-BaseChatModel", - "type": "buttonedge", - "id": "chatAnthropic_0-chatAnthropic_0-output-chatAnthropic-ChatAnthropic|BaseChatModel|BaseLanguageModel|Runnable-conversationChain_0-conversationChain_0-input-model-BaseChatModel" - }, - { - "source": "plainText_0", - "sourceHandle": "plainText_0-output-text-string|json", - "target": "chatPromptTemplate_0", - "targetHandle": "chatPromptTemplate_0-input-promptValues-json", - "type": "buttonedge", - "id": "plainText_0-plainText_0-output-text-string|json-chatPromptTemplate_0-chatPromptTemplate_0-input-promptValues-json" - }, - { - "source": "chatPromptTemplate_0", - "sourceHandle": "chatPromptTemplate_0-output-chatPromptTemplate-ChatPromptTemplate|BaseChatPromptTemplate|BasePromptTemplate|Runnable", - "target": "conversationChain_0", - "targetHandle": "conversationChain_0-input-chatPromptTemplate-ChatPromptTemplate", - "type": "buttonedge", - "id": 
"chatPromptTemplate_0-chatPromptTemplate_0-output-chatPromptTemplate-ChatPromptTemplate|BaseChatPromptTemplate|BasePromptTemplate|Runnable-conversationChain_0-conversationChain_0-input-chatPromptTemplate-ChatPromptTemplate" - } - ] -} diff --git a/packages/server/marketplaces/chatflows/Translator.json b/packages/server/marketplaces/chatflows/Translator.json deleted file mode 100644 index 9ec3285d8..000000000 --- a/packages/server/marketplaces/chatflows/Translator.json +++ /dev/null @@ -1,424 +0,0 @@ -{ - "description": "Language translation using LLM Chain with a Chat Prompt Template and Chat Model", - "usecases": ["Basic"], - "framework": ["Langchain"], - "nodes": [ - { - "width": 300, - "height": 690, - "id": "chatPromptTemplate_0", - "position": { - "x": 88.10922294721732, - "y": 373.4354021348812 - }, - "type": "customNode", - "data": { - "id": "chatPromptTemplate_0", - "label": "Chat Prompt Template", - "version": 1, - "name": "chatPromptTemplate", - "type": "ChatPromptTemplate", - "baseClasses": ["ChatPromptTemplate", "BaseChatPromptTemplate", "BasePromptTemplate"], - "category": "Prompts", - "description": "Schema to represent a chat prompt", - "inputParams": [ - { - "label": "System Message", - "name": "systemMessagePrompt", - "type": "string", - "rows": 4, - "placeholder": "You are a helpful assistant that translates {input_language} to {output_language}.", - "id": "chatPromptTemplate_0-input-systemMessagePrompt-string" - }, - { - "label": "Human Message", - "name": "humanMessagePrompt", - "type": "string", - "rows": 4, - "placeholder": "{text}", - "id": "chatPromptTemplate_0-input-humanMessagePrompt-string" - }, - { - "label": "Format Prompt Values", - "name": "promptValues", - "type": "json", - "optional": true, - "acceptVariable": true, - "list": true, - "id": "chatPromptTemplate_0-input-promptValues-json" - } - ], - "inputAnchors": [], - "inputs": { - "systemMessagePrompt": "You are a helpful assistant that translates {input_language} to 
{output_language}.", - "humanMessagePrompt": "{text}", - "promptValues": "{\"input_language\":\"English\",\"output_language\":\"French\",\"text\":\"\"}" - }, - "outputAnchors": [ - { - "id": "chatPromptTemplate_0-output-chatPromptTemplate-ChatPromptTemplate|BaseChatPromptTemplate|BasePromptTemplate", - "name": "chatPromptTemplate", - "label": "ChatPromptTemplate", - "type": "ChatPromptTemplate | BaseChatPromptTemplate | BasePromptTemplate" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 88.10922294721732, - "y": 373.4354021348812 - }, - "dragging": false - }, - { - "width": 300, - "height": 670, - "id": "chatOpenAI_0", - "position": { - "x": 423.0077090865524, - "y": 380.66673510213775 - }, - "type": "customNode", - "data": { - "id": "chatOpenAI_0", - "label": "ChatOpenAI", - "version": 6, - "name": "chatOpenAI", - "type": "ChatOpenAI", - "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel"], - "category": "Chat Models", - "description": "Wrapper around OpenAI large language models that use the Chat endpoint", - "inputParams": [ - { - "label": "Connect Credential", - "name": "credential", - "type": "credential", - "credentialNames": ["openAIApi"], - "id": "chatOpenAI_0-input-credential-credential" - }, - { - "label": "Model Name", - "name": "modelName", - "type": "asyncOptions", - "loadMethod": "listModels", - "default": "gpt-3.5-turbo", - "id": "chatOpenAI_0-input-modelName-options" - }, - { - "label": "Temperature", - "name": "temperature", - "type": "number", - "default": 0.9, - "optional": true, - "id": "chatOpenAI_0-input-temperature-number" - }, - { - "label": "Max Tokens", - "name": "maxTokens", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-maxTokens-number" - }, - { - "label": "Top Probability", - "name": "topP", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-topP-number" - }, - { - "label": 
"Frequency Penalty", - "name": "frequencyPenalty", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-frequencyPenalty-number" - }, - { - "label": "Presence Penalty", - "name": "presencePenalty", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-presencePenalty-number" - }, - { - "label": "Timeout", - "name": "timeout", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-timeout-number" - }, - { - "label": "BasePath", - "name": "basepath", - "type": "string", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-basepath-string" - }, - { - "label": "BaseOptions", - "name": "baseOptions", - "type": "json", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-baseOptions-json" - }, - { - "label": "Allow Image Uploads", - "name": "allowImageUploads", - "type": "boolean", - "description": "Automatically uses gpt-4-vision-preview when image is being uploaded from chat. 
Only works with LLMChain, Conversation Chain, ReAct Agent, and Conversational Agent", - "default": false, - "optional": true, - "id": "chatOpenAI_0-input-allowImageUploads-boolean" - }, - { - "label": "Image Resolution", - "description": "This parameter controls the resolution in which the model views the image.", - "name": "imageResolution", - "type": "options", - "options": [ - { - "label": "Low", - "name": "low" - }, - { - "label": "High", - "name": "high" - }, - { - "label": "Auto", - "name": "auto" - } - ], - "default": "low", - "optional": false, - "additionalParams": true, - "id": "chatOpenAI_0-input-imageResolution-options" - } - ], - "inputAnchors": [ - { - "label": "Cache", - "name": "cache", - "type": "BaseCache", - "optional": true, - "id": "chatOpenAI_0-input-cache-BaseCache" - } - ], - "inputs": { - "modelName": "gpt-3.5-turbo", - "temperature": "0", - "maxTokens": "", - "topP": "", - "frequencyPenalty": "", - "presencePenalty": "", - "timeout": "", - "basepath": "", - "baseOptions": "", - "allowImageUploads": true, - "imageResolution": "low" - }, - "outputAnchors": [ - { - "id": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel", - "name": "chatOpenAI", - "label": "ChatOpenAI", - "type": "ChatOpenAI | BaseChatModel | BaseLanguageModel" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 423.0077090865524, - "y": 380.66673510213775 - }, - "dragging": false - }, - { - "width": 300, - "height": 508, - "id": "llmChain_0", - "position": { - "x": 774.5069894501554, - "y": 480.02655553818863 - }, - "type": "customNode", - "data": { - "id": "llmChain_0", - "label": "LLM Chain", - "version": 3, - "name": "llmChain", - "type": "LLMChain", - "baseClasses": ["LLMChain", "BaseChain", "Runnable"], - "category": "Chains", - "description": "Chain to run queries against LLMs", - "inputParams": [ - { - "label": "Chain Name", - "name": "chainName", - "type": "string", - "placeholder": "Name 
Your Chain", - "optional": true, - "id": "llmChain_0-input-chainName-string" - } - ], - "inputAnchors": [ - { - "label": "Language Model", - "name": "model", - "type": "BaseLanguageModel", - "id": "llmChain_0-input-model-BaseLanguageModel" - }, - { - "label": "Prompt", - "name": "prompt", - "type": "BasePromptTemplate", - "id": "llmChain_0-input-prompt-BasePromptTemplate" - }, - { - "label": "Output Parser", - "name": "outputParser", - "type": "BaseLLMOutputParser", - "optional": true, - "id": "llmChain_0-input-outputParser-BaseLLMOutputParser" - }, - { - "label": "Input Moderation", - "description": "Detect text that could generate harmful output and prevent it from being sent to the language model", - "name": "inputModeration", - "type": "Moderation", - "optional": true, - "list": true, - "id": "llmChain_0-input-inputModeration-Moderation" - } - ], - "inputs": { - "model": "{{chatOpenAI_0.data.instance}}", - "prompt": "{{chatPromptTemplate_0.data.instance}}", - "outputParser": "", - "chainName": "Language Translation", - "inputModeration": "" - }, - "outputAnchors": [ - { - "name": "output", - "label": "Output", - "type": "options", - "options": [ - { - "id": "llmChain_0-output-llmChain-LLMChain|BaseChain|Runnable", - "name": "llmChain", - "label": "LLM Chain", - "type": "LLMChain | BaseChain | Runnable" - }, - { - "id": "llmChain_0-output-outputPrediction-string|json", - "name": "outputPrediction", - "label": "Output Prediction", - "type": "string | json" - } - ], - "default": "llmChain" - } - ], - "outputs": { - "output": "llmChain" - }, - "selected": false - }, - "selected": false, - "dragging": false, - "positionAbsolute": { - "x": 774.5069894501554, - "y": 480.02655553818863 - } - }, - { - "id": "stickyNote_0", - "position": { - "x": -258.15932684125505, - "y": 656.5109602097457 - }, - "type": "stickyNote", - "data": { - "id": "stickyNote_0", - "label": "Sticky Note", - "version": 2, - "name": "stickyNote", - "type": "StickyNote", - "baseClasses": 
["StickyNote"], - "tags": ["Utilities"], - "category": "Utilities", - "description": "Add a sticky note", - "inputParams": [ - { - "label": "", - "name": "note", - "type": "string", - "rows": 1, - "placeholder": "Type something here", - "optional": true, - "id": "stickyNote_0-input-note-string" - } - ], - "inputAnchors": [], - "inputs": { - "note": "In the Format Prompt Values, we can specify the variables used in prompt.\n\n{\n input_language: \"English\",\n output_language: \"French\"\n}\n\nIf the last variable is not specified, in this case {text}, user question will be used as value." - }, - "outputAnchors": [ - { - "id": "stickyNote_0-output-stickyNote-StickyNote", - "name": "stickyNote", - "label": "StickyNote", - "description": "Add a sticky note", - "type": "StickyNote" - } - ], - "outputs": {}, - "selected": false - }, - "width": 300, - "height": 243, - "selected": false, - "positionAbsolute": { - "x": -258.15932684125505, - "y": 656.5109602097457 - }, - "dragging": false - } - ], - "edges": [ - { - "source": "chatOpenAI_0", - "sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel", - "target": "llmChain_0", - "targetHandle": "llmChain_0-input-model-BaseLanguageModel", - "type": "buttonedge", - "id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel-llmChain_0-llmChain_0-input-model-BaseLanguageModel", - "data": { - "label": "" - } - }, - { - "source": "chatPromptTemplate_0", - "sourceHandle": "chatPromptTemplate_0-output-chatPromptTemplate-ChatPromptTemplate|BaseChatPromptTemplate|BasePromptTemplate", - "target": "llmChain_0", - "targetHandle": "llmChain_0-input-prompt-BasePromptTemplate", - "type": "buttonedge", - "id": "chatPromptTemplate_0-chatPromptTemplate_0-output-chatPromptTemplate-ChatPromptTemplate|BaseChatPromptTemplate|BasePromptTemplate-llmChain_0-llmChain_0-input-prompt-BasePromptTemplate", - "data": { - "label": "" - } - } - ] -} diff --git 
a/packages/server/marketplaces/chatflows/Vectara RAG Chain.json b/packages/server/marketplaces/chatflows/Vectara RAG Chain.json deleted file mode 100644 index e2dcde49c..000000000 --- a/packages/server/marketplaces/chatflows/Vectara RAG Chain.json +++ /dev/null @@ -1,398 +0,0 @@ -{ - "description": "Using Vectara for Retrieval Augmented Generation (RAG) to answer questions from documents", - "usecases": ["Documents QnA"], - "framework": ["Langchain"], - "nodes": [ - { - "width": 300, - "height": 520, - "id": "vectaraQAChain_0", - "position": { - "x": 740.28434119739, - "y": 164.93261446841598 - }, - "type": "customNode", - "data": { - "id": "vectaraQAChain_0", - "label": "Vectara QA Chain", - "version": 2, - "name": "vectaraQAChain", - "type": "VectaraQAChain", - "baseClasses": ["VectaraQAChain", "BaseChain", "Runnable"], - "category": "Chains", - "description": "QA chain for Vectara", - "inputParams": [ - { - "label": "Summarizer Prompt Name", - "name": "summarizerPromptName", - "description": "Summarize the results fetched from Vectara. 
Read more", - "type": "options", - "options": [ - { - "label": "vectara-summary-ext-v1.2.0 (gpt-3.5-turbo)", - "name": "vectara-summary-ext-v1.2.0" - }, - { - "label": "vectara-experimental-summary-ext-2023-10-23-small (gpt-3.5-turbo)", - "name": "vectara-experimental-summary-ext-2023-10-23-small", - "description": "In beta, available to both Growth and Scale Vectara users" - }, - { - "label": "vectara-summary-ext-v1.3.0 (gpt-4.0)", - "name": "vectara-summary-ext-v1.3.0", - "description": "Only available to paying Scale Vectara users" - }, - { - "label": "vectara-experimental-summary-ext-2023-10-23-med (gpt-4.0)", - "name": "vectara-experimental-summary-ext-2023-10-23-med", - "description": "In beta, only available to paying Scale Vectara users" - } - ], - "default": "vectara-summary-ext-v1.2.0", - "id": "vectaraQAChain_0-input-summarizerPromptName-options" - }, - { - "label": "Response Language", - "name": "responseLang", - "description": "Return the response in specific language. If not selected, Vectara will automatically detects the language. 
Read more", - "type": "options", - "options": [ - { - "label": "English", - "name": "eng" - }, - { - "label": "German", - "name": "deu" - }, - { - "label": "French", - "name": "fra" - }, - { - "label": "Chinese", - "name": "zho" - }, - { - "label": "Korean", - "name": "kor" - }, - { - "label": "Arabic", - "name": "ara" - }, - { - "label": "Russian", - "name": "rus" - }, - { - "label": "Thai", - "name": "tha" - }, - { - "label": "Dutch", - "name": "nld" - }, - { - "label": "Italian", - "name": "ita" - }, - { - "label": "Portuguese", - "name": "por" - }, - { - "label": "Spanish", - "name": "spa" - }, - { - "label": "Japanese", - "name": "jpn" - }, - { - "label": "Polish", - "name": "pol" - }, - { - "label": "Turkish", - "name": "tur" - }, - { - "label": "Vietnamese", - "name": "vie" - }, - { - "label": "Indonesian", - "name": "ind" - }, - { - "label": "Czech", - "name": "ces" - }, - { - "label": "Ukrainian", - "name": "ukr" - }, - { - "label": "Greek", - "name": "ell" - }, - { - "label": "Hebrew", - "name": "heb" - }, - { - "label": "Farsi/Persian", - "name": "fas" - }, - { - "label": "Hindi", - "name": "hin" - }, - { - "label": "Urdu", - "name": "urd" - }, - { - "label": "Swedish", - "name": "swe" - }, - { - "label": "Bengali", - "name": "ben" - }, - { - "label": "Malay", - "name": "msa" - }, - { - "label": "Romanian", - "name": "ron" - } - ], - "optional": true, - "default": "eng", - "id": "vectaraQAChain_0-input-responseLang-options" - }, - { - "label": "Max Summarized Results", - "name": "maxSummarizedResults", - "description": "Maximum results used to build the summarized response", - "type": "number", - "default": 7, - "id": "vectaraQAChain_0-input-maxSummarizedResults-number" - } - ], - "inputAnchors": [ - { - "label": "Vectara Store", - "name": "vectaraStore", - "type": "VectorStore", - "id": "vectaraQAChain_0-input-vectaraStore-VectorStore" - }, - { - "label": "Input Moderation", - "description": "Detect text that could generate harmful output and prevent it 
from being sent to the language model", - "name": "inputModeration", - "type": "Moderation", - "optional": true, - "list": true, - "id": "vectaraQAChain_0-input-inputModeration-Moderation" - } - ], - "inputs": { - "inputModeration": "", - "vectaraStore": "{{vectara_1.data.instance}}", - "summarizerPromptName": "vectara-experimental-summary-ext-2023-10-23-small", - "responseLang": "eng", - "maxSummarizedResults": 7 - }, - "outputAnchors": [ - { - "id": "vectaraQAChain_0-output-vectaraQAChain-VectaraQAChain|BaseChain|Runnable", - "name": "vectaraQAChain", - "label": "VectaraQAChain", - "type": "VectaraQAChain | BaseChain | Runnable" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 740.28434119739, - "y": 164.93261446841598 - }, - "dragging": false - }, - { - "width": 300, - "height": 536, - "id": "vectara_1", - "position": { - "x": 139.43135627266395, - "y": 189.3685569634871 - }, - "type": "customNode", - "data": { - "id": "vectara_1", - "label": "Vectara", - "version": 2, - "name": "vectara", - "type": "Vectara", - "baseClasses": ["Vectara", "VectorStoreRetriever", "BaseRetriever"], - "category": "Vector Stores", - "description": "Upsert embedded data and perform similarity search upon query using Vectara, a LLM-powered search-as-a-service", - "inputParams": [ - { - "label": "Connect Credential", - "name": "credential", - "type": "credential", - "credentialNames": ["vectaraApi"], - "id": "vectara_1-input-credential-credential" - }, - { - "label": "File", - "name": "file", - "description": "File to upload to Vectara. Supported file types: https://docs.vectara.com/docs/api-reference/indexing-apis/file-upload/file-upload-filetypes", - "type": "file", - "optional": true, - "id": "vectara_1-input-file-file" - }, - { - "label": "Metadata Filter", - "name": "filter", - "description": "Filter to apply to Vectara metadata. 
Refer to the documentation on how to use Vectara filters with Flowise.", - "type": "string", - "additionalParams": true, - "optional": true, - "id": "vectara_1-input-filter-string" - }, - { - "label": "Sentences Before", - "name": "sentencesBefore", - "description": "Number of sentences to fetch before the matched sentence. Defaults to 2.", - "type": "number", - "default": 2, - "additionalParams": true, - "optional": true, - "id": "vectara_1-input-sentencesBefore-number" - }, - { - "label": "Sentences After", - "name": "sentencesAfter", - "description": "Number of sentences to fetch after the matched sentence. Defaults to 2.", - "type": "number", - "default": 2, - "additionalParams": true, - "optional": true, - "id": "vectara_1-input-sentencesAfter-number" - }, - { - "label": "Lambda", - "name": "lambda", - "description": "Enable hybrid search to improve retrieval accuracy by adjusting the balance (from 0 to 1) between neural search and keyword-based search factors.A value of 0.0 means that only neural search is used, while a value of 1.0 means that only keyword-based search is used. Defaults to 0.0 (neural only).", - "default": 0, - "type": "number", - "additionalParams": true, - "optional": true, - "id": "vectara_1-input-lambda-number" - }, - { - "label": "Top K", - "name": "topK", - "description": "Number of top results to fetch. Defaults to 5", - "placeholder": "5", - "type": "number", - "additionalParams": true, - "optional": true, - "id": "vectara_1-input-topK-number" - }, - { - "label": "MMR K", - "name": "mmrK", - "description": "Number of top results to fetch for MMR. Defaults to 50", - "placeholder": "50", - "type": "number", - "additionalParams": true, - "optional": true, - "id": "vectara_1-input-mmrK-number" - }, - { - "label": "MMR diversity bias", - "name": "mmrDiversityBias", - "step": 0.1, - "description": "The diversity bias to use for MMR. 
This is a value between 0.0 and 1.0Values closer to 1.0 optimize for the most diverse results.Defaults to 0 (MMR disabled)", - "placeholder": "0.0", - "type": "number", - "additionalParams": true, - "optional": true, - "id": "vectara_1-input-mmrDiversityBias-number" - } - ], - "inputAnchors": [ - { - "label": "Document", - "name": "document", - "type": "Document", - "list": true, - "optional": true, - "id": "vectara_1-input-document-Document" - } - ], - "inputs": { - "document": "", - "filter": "", - "sentencesBefore": 2, - "sentencesAfter": 2, - "lambda": "", - "topK": "", - "mmrK": "", - "mmrDiversityBias": "" - }, - "outputAnchors": [ - { - "name": "output", - "label": "Output", - "type": "options", - "options": [ - { - "id": "vectara_1-output-retriever-Vectara|VectorStoreRetriever|BaseRetriever", - "name": "retriever", - "label": "Vectara Retriever", - "type": "Vectara | VectorStoreRetriever | BaseRetriever" - }, - { - "id": "vectara_1-output-vectorStore-Vectara|VectorStore", - "name": "vectorStore", - "label": "Vectara Vector Store", - "type": "Vectara | VectorStore" - } - ], - "default": "retriever" - } - ], - "outputs": { - "output": "vectorStore" - }, - "selected": false - }, - "positionAbsolute": { - "x": 139.43135627266395, - "y": 189.3685569634871 - }, - "selected": false, - "dragging": false - } - ], - "edges": [ - { - "source": "vectara_1", - "sourceHandle": "vectara_1-output-vectorStore-Vectara|VectorStore", - "target": "vectaraQAChain_0", - "targetHandle": "vectaraQAChain_0-input-vectaraStore-VectorStore", - "type": "buttonedge", - "id": "vectara_1-vectara_1-output-vectorStore-Vectara|VectorStore-vectaraQAChain_0-vectaraQAChain_0-input-vectaraStore-VectorStore" - } - ] -} diff --git a/packages/server/marketplaces/chatflows/WebPage QnA.json b/packages/server/marketplaces/chatflows/WebPage QnA.json deleted file mode 100644 index 14900ce61..000000000 --- a/packages/server/marketplaces/chatflows/WebPage QnA.json +++ /dev/null @@ -1,788 +0,0 @@ -{ - 
"description": "Scrape web pages to be used with Retrieval Augmented Generation (RAG) for question answering", - "usecases": ["Documents QnA"], - "framework": ["Langchain"], - "badge": "POPULAR", - "nodes": [ - { - "width": 300, - "height": 424, - "id": "openAIEmbeddings_0", - "position": { - "x": 805.4033852865127, - "y": 289.17383087232275 - }, - "type": "customNode", - "data": { - "id": "openAIEmbeddings_0", - "label": "OpenAI Embeddings", - "version": 4, - "name": "openAIEmbeddings", - "type": "OpenAIEmbeddings", - "baseClasses": ["OpenAIEmbeddings", "Embeddings"], - "category": "Embeddings", - "description": "OpenAI API to generate embeddings for a given text", - "inputParams": [ - { - "label": "Connect Credential", - "name": "credential", - "type": "credential", - "credentialNames": ["openAIApi"], - "id": "openAIEmbeddings_0-input-credential-credential" - }, - { - "label": "Model Name", - "name": "modelName", - "type": "asyncOptions", - "loadMethod": "listModels", - "default": "text-embedding-ada-002", - "id": "openAIEmbeddings_0-input-modelName-asyncOptions" - }, - { - "label": "Strip New Lines", - "name": "stripNewLines", - "type": "boolean", - "optional": true, - "additionalParams": true, - "id": "openAIEmbeddings_0-input-stripNewLines-boolean" - }, - { - "label": "Batch Size", - "name": "batchSize", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "openAIEmbeddings_0-input-batchSize-number" - }, - { - "label": "Timeout", - "name": "timeout", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "openAIEmbeddings_0-input-timeout-number" - }, - { - "label": "BasePath", - "name": "basepath", - "type": "string", - "optional": true, - "additionalParams": true, - "id": "openAIEmbeddings_0-input-basepath-string" - }, - { - "label": "Dimensions", - "name": "dimensions", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "openAIEmbeddings_0-input-dimensions-number" - } - ], - "inputAnchors": 
[], - "inputs": { - "modelName": "text-embedding-ada-002", - "stripNewLines": "", - "batchSize": "", - "timeout": "", - "basepath": "", - "dimensions": "" - }, - "outputAnchors": [ - { - "id": "openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", - "name": "openAIEmbeddings", - "label": "OpenAIEmbeddings", - "description": "OpenAI API to generate embeddings for a given text", - "type": "OpenAIEmbeddings | Embeddings" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 805.4033852865127, - "y": 289.17383087232275 - }, - "dragging": false - }, - { - "width": 300, - "height": 378, - "id": "htmlToMarkdownTextSplitter_0", - "position": { - "x": 459.0189921792261, - "y": -21.97787557438943 - }, - "type": "customNode", - "data": { - "id": "htmlToMarkdownTextSplitter_0", - "label": "HtmlToMarkdown Text Splitter", - "version": 1, - "name": "htmlToMarkdownTextSplitter", - "type": "HtmlToMarkdownTextSplitter", - "baseClasses": [ - "HtmlToMarkdownTextSplitter", - "MarkdownTextSplitter", - "RecursiveCharacterTextSplitter", - "TextSplitter", - "BaseDocumentTransformer" - ], - "category": "Text Splitters", - "description": "Converts Html to Markdown and then split your content into documents based on the Markdown headers", - "inputParams": [ - { - "label": "Chunk Size", - "name": "chunkSize", - "type": "number", - "default": 1000, - "optional": true, - "id": "htmlToMarkdownTextSplitter_0-input-chunkSize-number" - }, - { - "label": "Chunk Overlap", - "name": "chunkOverlap", - "type": "number", - "optional": true, - "id": "htmlToMarkdownTextSplitter_0-input-chunkOverlap-number" - } - ], - "inputAnchors": [], - "inputs": { - "chunkSize": "4000", - "chunkOverlap": "" - }, - "outputAnchors": [ - { - "id": "htmlToMarkdownTextSplitter_0-output-htmlToMarkdownTextSplitter-HtmlToMarkdownTextSplitter|MarkdownTextSplitter|RecursiveCharacterTextSplitter|TextSplitter|BaseDocumentTransformer", - "name": 
"htmlToMarkdownTextSplitter", - "label": "HtmlToMarkdownTextSplitter", - "type": "HtmlToMarkdownTextSplitter | MarkdownTextSplitter | RecursiveCharacterTextSplitter | TextSplitter | BaseDocumentTransformer" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 459.0189921792261, - "y": -21.97787557438943 - }, - "dragging": false - }, - { - "width": 300, - "height": 532, - "id": "conversationalRetrievalQAChain_0", - "position": { - "x": 1892.82894546983, - "y": 282.2572649522094 - }, - "type": "customNode", - "data": { - "id": "conversationalRetrievalQAChain_0", - "label": "Conversational Retrieval QA Chain", - "version": 3, - "name": "conversationalRetrievalQAChain", - "type": "ConversationalRetrievalQAChain", - "baseClasses": ["ConversationalRetrievalQAChain", "BaseChain", "Runnable"], - "category": "Chains", - "description": "Document QA - built on RetrievalQAChain to provide a chat history component", - "inputParams": [ - { - "label": "Return Source Documents", - "name": "returnSourceDocuments", - "type": "boolean", - "optional": true, - "id": "conversationalRetrievalQAChain_0-input-returnSourceDocuments-boolean" - }, - { - "label": "Rephrase Prompt", - "name": "rephrasePrompt", - "type": "string", - "description": "Using previous chat history, rephrase question into a standalone question", - "warning": "Prompt must include input variables: {chat_history} and {question}", - "rows": 4, - "additionalParams": true, - "optional": true, - "default": "Given the following conversation and a follow up question, rephrase the follow up question to be a standalone question.\n\nChat History:\n{chat_history}\nFollow Up Input: {question}\nStandalone Question:", - "id": "conversationalRetrievalQAChain_0-input-rephrasePrompt-string" - }, - { - "label": "Response Prompt", - "name": "responsePrompt", - "type": "string", - "description": "Taking the rephrased question, search for answer from the provided context", - "warning": 
"Prompt must include input variable: {context}", - "rows": 4, - "additionalParams": true, - "optional": true, - "default": "You are a helpful assistant. Using the provided context, answer the user's question to the best of your ability using the resources provided.\nIf there is nothing in the context relevant to the question at hand, just say \"Hmm, I'm not sure.\" Don't try to make up an answer.\n------------\n{context}\n------------\nREMEMBER: If there is no relevant information within the context, just say \"Hmm, I'm not sure.\" Don't try to make up an answer.", - "id": "conversationalRetrievalQAChain_0-input-responsePrompt-string" - } - ], - "inputAnchors": [ - { - "label": "Chat Model", - "name": "model", - "type": "BaseChatModel", - "id": "conversationalRetrievalQAChain_0-input-model-BaseChatModel" - }, - { - "label": "Vector Store Retriever", - "name": "vectorStoreRetriever", - "type": "BaseRetriever", - "id": "conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever" - }, - { - "label": "Memory", - "name": "memory", - "type": "BaseMemory", - "optional": true, - "description": "If left empty, a default BufferMemory will be used", - "id": "conversationalRetrievalQAChain_0-input-memory-BaseMemory" - }, - { - "label": "Input Moderation", - "description": "Detect text that could generate harmful output and prevent it from being sent to the language model", - "name": "inputModeration", - "type": "Moderation", - "optional": true, - "list": true, - "id": "conversationalRetrievalQAChain_0-input-inputModeration-Moderation" - } - ], - "inputs": { - "inputModeration": "", - "model": "{{chatOpenAI_0.data.instance}}", - "vectorStoreRetriever": "{{pinecone_0.data.instance}}", - "memory": "", - "returnSourceDocuments": true, - "rephrasePrompt": "Given the following conversation and a follow up question, rephrase the follow up question to be a standalone question.\n\nChat History:\n{chat_history}\nFollow Up Input: {question}\nStandalone Question:", - 
"responsePrompt": "You are a helpful assistant. Using the provided context, answer the user's question to the best of your ability using the resources provided.\nIf there is nothing in the context relevant to the question at hand, just say \"Hmm, I'm not sure.\" Don't try to make up an answer.\n------------\n{context}\n------------\nREMEMBER: If there is no relevant information within the context, just say \"Hmm, I'm not sure.\" Don't try to make up an answer." - }, - "outputAnchors": [ - { - "id": "conversationalRetrievalQAChain_0-output-conversationalRetrievalQAChain-ConversationalRetrievalQAChain|BaseChain|Runnable", - "name": "conversationalRetrievalQAChain", - "label": "ConversationalRetrievalQAChain", - "type": "ConversationalRetrievalQAChain | BaseChain | Runnable" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 1892.82894546983, - "y": 282.2572649522094 - }, - "dragging": false - }, - { - "width": 300, - "height": 426, - "id": "cheerioWebScraper_0", - "position": { - "x": 815.9295655148293, - "y": -190.50425962124604 - }, - "type": "customNode", - "data": { - "id": "cheerioWebScraper_0", - "label": "Cheerio Web Scraper", - "version": 1.1, - "name": "cheerioWebScraper", - "type": "Document", - "baseClasses": ["Document"], - "category": "Document Loaders", - "description": "Load data from webpages", - "inputParams": [ - { - "label": "URL", - "name": "url", - "type": "string", - "id": "cheerioWebScraper_0-input-url-string" - }, - { - "label": "Get Relative Links Method", - "name": "relativeLinksMethod", - "type": "options", - "description": "Select a method to retrieve relative links", - "options": [ - { - "label": "Web Crawl", - "name": "webCrawl", - "description": "Crawl relative links from HTML URL" - }, - { - "label": "Scrape XML Sitemap", - "name": "scrapeXMLSitemap", - "description": "Scrape relative links from XML sitemap URL" - } - ], - "optional": true, - "additionalParams": true, - "id": 
"cheerioWebScraper_0-input-relativeLinksMethod-options" - }, - { - "label": "Get Relative Links Limit", - "name": "limit", - "type": "number", - "optional": true, - "additionalParams": true, - "description": "Only used when \"Get Relative Links Method\" is selected. Set 0 to retrieve all relative links, default limit is 10.", - "warning": "Retrieving all links might take long time, and all links will be upserted again if the flow's state changed (eg: different URL, chunk size, etc)", - "id": "cheerioWebScraper_0-input-limit-number" - }, - { - "label": "Selector (CSS)", - "name": "selector", - "type": "string", - "description": "Specify a CSS selector to select the content to be extracted", - "optional": true, - "additionalParams": true, - "id": "cheerioWebScraper_0-input-selector-string" - }, - { - "label": "Metadata", - "name": "metadata", - "type": "json", - "optional": true, - "additionalParams": true, - "id": "cheerioWebScraper_0-input-metadata-json" - } - ], - "inputAnchors": [ - { - "label": "Text Splitter", - "name": "textSplitter", - "type": "TextSplitter", - "optional": true, - "id": "cheerioWebScraper_0-input-textSplitter-TextSplitter" - } - ], - "inputs": { - "url": "https://flowiseai.com/", - "textSplitter": "{{htmlToMarkdownTextSplitter_0.data.instance}}", - "relativeLinksMethod": "", - "limit": "", - "metadata": "" - }, - "outputAnchors": [ - { - "id": "cheerioWebScraper_0-output-cheerioWebScraper-Document", - "name": "cheerioWebScraper", - "label": "Document", - "type": "Document" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 815.9295655148293, - "y": -190.50425962124604 - }, - "dragging": false - }, - { - "width": 300, - "height": 670, - "id": "chatOpenAI_0", - "position": { - "x": 1532.4907022314349, - "y": -270.38662863532466 - }, - "type": "customNode", - "data": { - "id": "chatOpenAI_0", - "label": "ChatOpenAI", - "version": 6, - "name": "chatOpenAI", - "type": "ChatOpenAI", - 
"baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel", "Runnable"], - "category": "Chat Models", - "description": "Wrapper around OpenAI large language models that use the Chat endpoint", - "inputParams": [ - { - "label": "Connect Credential", - "name": "credential", - "type": "credential", - "credentialNames": ["openAIApi"], - "id": "chatOpenAI_0-input-credential-credential" - }, - { - "label": "Model Name", - "name": "modelName", - "type": "asyncOptions", - "loadMethod": "listModels", - "default": "gpt-3.5-turbo", - "id": "chatOpenAI_0-input-modelName-options" - }, - { - "label": "Temperature", - "name": "temperature", - "type": "number", - "step": 0.1, - "default": 0.9, - "optional": true, - "id": "chatOpenAI_0-input-temperature-number" - }, - { - "label": "Max Tokens", - "name": "maxTokens", - "type": "number", - "step": 1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-maxTokens-number" - }, - { - "label": "Top Probability", - "name": "topP", - "type": "number", - "step": 0.1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-topP-number" - }, - { - "label": "Frequency Penalty", - "name": "frequencyPenalty", - "type": "number", - "step": 0.1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-frequencyPenalty-number" - }, - { - "label": "Presence Penalty", - "name": "presencePenalty", - "type": "number", - "step": 0.1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-presencePenalty-number" - }, - { - "label": "Timeout", - "name": "timeout", - "type": "number", - "step": 1, - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-timeout-number" - }, - { - "label": "BasePath", - "name": "basepath", - "type": "string", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-basepath-string" - }, - { - "label": "BaseOptions", - "name": "baseOptions", - "type": "json", - "optional": true, - 
"additionalParams": true, - "id": "chatOpenAI_0-input-baseOptions-json" - }, - { - "label": "Allow Image Uploads", - "name": "allowImageUploads", - "type": "boolean", - "description": "Automatically uses gpt-4-vision-preview when image is being uploaded from chat. Only works with LLMChain, Conversation Chain, ReAct Agent, and Conversational Agent", - "default": false, - "optional": true, - "id": "chatOpenAI_0-input-allowImageUploads-boolean" - }, - { - "label": "Image Resolution", - "description": "This parameter controls the resolution in which the model views the image.", - "name": "imageResolution", - "type": "options", - "options": [ - { - "label": "Low", - "name": "low" - }, - { - "label": "High", - "name": "high" - }, - { - "label": "Auto", - "name": "auto" - } - ], - "default": "low", - "optional": false, - "additionalParams": true, - "id": "chatOpenAI_0-input-imageResolution-options" - } - ], - "inputAnchors": [ - { - "label": "Cache", - "name": "cache", - "type": "BaseCache", - "optional": true, - "id": "chatOpenAI_0-input-cache-BaseCache" - } - ], - "inputs": { - "cache": "", - "modelName": "gpt-3.5-turbo", - "temperature": 0.9, - "maxTokens": "", - "topP": "", - "frequencyPenalty": "", - "presencePenalty": "", - "timeout": "", - "basepath": "", - "baseOptions": "", - "allowImageUploads": true, - "imageResolution": "low" - }, - "outputAnchors": [ - { - "id": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", - "name": "chatOpenAI", - "label": "ChatOpenAI", - "type": "ChatOpenAI | BaseChatModel | BaseLanguageModel | Runnable" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 1532.4907022314349, - "y": -270.38662863532466 - }, - "dragging": false - }, - { - "width": 300, - "height": 555, - "id": "pinecone_0", - "position": { - "x": 1182.9660159923678, - "y": 56.45789225898284 - }, - "type": "customNode", - "data": { - "id": "pinecone_0", - "label": "Pinecone", - 
"version": 3, - "name": "pinecone", - "type": "Pinecone", - "baseClasses": ["Pinecone", "VectorStoreRetriever", "BaseRetriever"], - "category": "Vector Stores", - "description": "Upsert embedded data and perform similarity or mmr search using Pinecone, a leading fully managed hosted vector database", - "inputParams": [ - { - "label": "Connect Credential", - "name": "credential", - "type": "credential", - "credentialNames": ["pineconeApi"], - "id": "pinecone_0-input-credential-credential" - }, - { - "label": "Pinecone Index", - "name": "pineconeIndex", - "type": "string", - "id": "pinecone_0-input-pineconeIndex-string" - }, - { - "label": "Pinecone Namespace", - "name": "pineconeNamespace", - "type": "string", - "placeholder": "my-first-namespace", - "additionalParams": true, - "optional": true, - "id": "pinecone_0-input-pineconeNamespace-string" - }, - { - "label": "Pinecone Metadata Filter", - "name": "pineconeMetadataFilter", - "type": "json", - "optional": true, - "additionalParams": true, - "id": "pinecone_0-input-pineconeMetadataFilter-json" - }, - { - "label": "Top K", - "name": "topK", - "description": "Number of top results to fetch. Default to 4", - "placeholder": "4", - "type": "number", - "additionalParams": true, - "optional": true, - "id": "pinecone_0-input-topK-number" - }, - { - "label": "Search Type", - "name": "searchType", - "type": "options", - "default": "similarity", - "options": [ - { - "label": "Similarity", - "name": "similarity" - }, - { - "label": "Max Marginal Relevance", - "name": "mmr" - } - ], - "additionalParams": true, - "optional": true, - "id": "pinecone_0-input-searchType-options" - }, - { - "label": "Fetch K (for MMR Search)", - "name": "fetchK", - "description": "Number of initial documents to fetch for MMR reranking. Default to 20. 
Used only when the search type is MMR", - "placeholder": "20", - "type": "number", - "additionalParams": true, - "optional": true, - "id": "pinecone_0-input-fetchK-number" - }, - { - "label": "Lambda (for MMR Search)", - "name": "lambda", - "description": "Number between 0 and 1 that determines the degree of diversity among the results, where 0 corresponds to maximum diversity and 1 to minimum diversity. Used only when the search type is MMR", - "placeholder": "0.5", - "type": "number", - "additionalParams": true, - "optional": true, - "id": "pinecone_0-input-lambda-number" - } - ], - "inputAnchors": [ - { - "label": "Document", - "name": "document", - "type": "Document", - "list": true, - "optional": true, - "id": "pinecone_0-input-document-Document" - }, - { - "label": "Embeddings", - "name": "embeddings", - "type": "Embeddings", - "id": "pinecone_0-input-embeddings-Embeddings" - } - ], - "inputs": { - "document": ["{{cheerioWebScraper_0.data.instance}}"], - "embeddings": "{{openAIEmbeddings_0.data.instance}}", - "pineconeIndex": "", - "pineconeNamespace": "", - "pineconeMetadataFilter": "", - "topK": "", - "searchType": "similarity", - "fetchK": "", - "lambda": "" - }, - "outputAnchors": [ - { - "name": "output", - "label": "Output", - "type": "options", - "options": [ - { - "id": "pinecone_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever", - "name": "retriever", - "label": "Pinecone Retriever", - "type": "Pinecone | VectorStoreRetriever | BaseRetriever" - }, - { - "id": "pinecone_0-output-vectorStore-Pinecone|VectorStore", - "name": "vectorStore", - "label": "Pinecone Vector Store", - "type": "Pinecone | VectorStore" - } - ], - "default": "retriever" - } - ], - "outputs": { - "output": "retriever" - }, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 1182.9660159923678, - "y": 56.45789225898284 - }, - "dragging": false - } - ], - "edges": [ - { - "source": "htmlToMarkdownTextSplitter_0", - "sourceHandle": 
"htmlToMarkdownTextSplitter_0-output-htmlToMarkdownTextSplitter-HtmlToMarkdownTextSplitter|MarkdownTextSplitter|RecursiveCharacterTextSplitter|TextSplitter|BaseDocumentTransformer", - "target": "cheerioWebScraper_0", - "targetHandle": "cheerioWebScraper_0-input-textSplitter-TextSplitter", - "type": "buttonedge", - "id": "htmlToMarkdownTextSplitter_0-htmlToMarkdownTextSplitter_0-output-htmlToMarkdownTextSplitter-HtmlToMarkdownTextSplitter|MarkdownTextSplitter|RecursiveCharacterTextSplitter|TextSplitter|BaseDocumentTransformer-cheerioWebScraper_0-cheerioWebScraper_0-input-textSplitter-TextSplitter", - "data": { - "label": "" - } - }, - { - "source": "chatOpenAI_0", - "sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", - "target": "conversationalRetrievalQAChain_0", - "targetHandle": "conversationalRetrievalQAChain_0-input-model-BaseChatModel", - "type": "buttonedge", - "id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable-conversationalRetrievalQAChain_0-conversationalRetrievalQAChain_0-input-model-BaseChatModel", - "data": { - "label": "" - } - }, - { - "source": "cheerioWebScraper_0", - "sourceHandle": "cheerioWebScraper_0-output-cheerioWebScraper-Document", - "target": "pinecone_0", - "targetHandle": "pinecone_0-input-document-Document", - "type": "buttonedge", - "id": "cheerioWebScraper_0-cheerioWebScraper_0-output-cheerioWebScraper-Document-pinecone_0-pinecone_0-input-document-Document", - "data": { - "label": "" - } - }, - { - "source": "openAIEmbeddings_0", - "sourceHandle": "openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", - "target": "pinecone_0", - "targetHandle": "pinecone_0-input-embeddings-Embeddings", - "type": "buttonedge", - "id": "openAIEmbeddings_0-openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings-pinecone_0-pinecone_0-input-embeddings-Embeddings", - "data": { - "label": "" - } - }, - { - "source": 
"pinecone_0", - "sourceHandle": "pinecone_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever", - "target": "conversationalRetrievalQAChain_0", - "targetHandle": "conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever", - "type": "buttonedge", - "id": "pinecone_0-pinecone_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever-conversationalRetrievalQAChain_0-conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever", - "data": { - "label": "" - } - } - ] -} diff --git a/packages/server/package.json b/packages/server/package.json index 2075f3949..e86c67bff 100644 --- a/packages/server/package.json +++ b/packages/server/package.json @@ -1,6 +1,6 @@ { "name": "flowise", - "version": "3.0.0", + "version": "3.0.11", "description": "Flowiseai Server", "main": "dist/index", "types": "dist/index.d.ts", @@ -20,7 +20,7 @@ "commands": "./dist/commands" }, "scripts": { - "build": "tsc", + "build": "tsc && rimraf dist/enterprise/emails && gulp", "start": "run-script-os", "clean": "rimraf dist", "nuke": "rimraf dist node_modules .turbo", @@ -28,6 +28,9 @@ "start:default": "cd bin && ./run start", "start-worker:windows": "cd bin && run worker", "start-worker:default": "cd bin && ./run worker", + "user": "run-script-os", + "user:windows": "cd bin && run user", + "user:default": "cd bin && ./run user", "dev": "nodemon", "oclif-dev": "run-script-os", "oclif-dev:windows": "cd bin && dev start", @@ -37,13 +40,14 @@ "typeorm": "typeorm-ts-node-commonjs", "typeorm:migration-generate": "pnpm typeorm migration:generate -d ./src/utils/typeormDataSource.ts", "typeorm:migration-run": "pnpm typeorm migration:run -d ./src/utils/typeormDataSource.ts", + "typeorm:migration-revert": "pnpm typeorm migration:revert -d ./src/utils/typeormDataSource.ts", "watch": "tsc --watch", "version": "oclif readme && git add README.md", "cypress:open": "cypress open", "cypress:run": "cypress run", "e2e": "start-server-and-test dev http://localhost:3000 
cypress:run", "cypress:ci": "START_SERVER_AND_TEST_INSECURE=1 start-server-and-test start https-get://localhost:3000 cypress:run", - "test": "jest" + "test": "jest --runInBand --detectOpenHandles --forceExit" }, "keywords": [], "homepage": "https://flowiseai.com", @@ -57,9 +61,12 @@ "license": "SEE LICENSE IN LICENSE.md", "dependencies": { "@aws-sdk/client-secrets-manager": "^3.699.0", + "@bull-board/api": "^6.11.0", + "@bull-board/express": "^6.11.0", "@google-cloud/logging-winston": "^6.0.0", + "@keyv/redis": "^4.2.0", "@oclif/core": "4.0.7", - "@opentelemetry/api": "^1.3.0", + "@opentelemetry/api": "1.9.0", "@opentelemetry/auto-instrumentations-node": "^0.52.0", "@opentelemetry/core": "1.27.0", "@opentelemetry/exporter-metrics-otlp-grpc": "0.54.0", @@ -73,24 +80,41 @@ "@opentelemetry/sdk-node": "^0.54.0", "@opentelemetry/sdk-trace-base": "1.27.0", "@opentelemetry/semantic-conventions": "1.27.0", - "@types/lodash": "^4.14.202", + "@types/bcryptjs": "^2.4.6", + "@types/lodash": "^4.17.20", + "@types/passport": "^1.0.16", + "@types/passport-jwt": "^4.0.1", + "@types/passport-local": "^1.0.38", "@types/uuid": "^9.0.7", "async-mutex": "^0.4.0", - "axios": "1.7.9", - "bull-board": "^2.1.3", - "bullmq": "^5.42.0", + "axios": "1.12.0", + "bcryptjs": "^2.4.3", + "bullmq": "5.45.2", + "cache-manager": "^6.3.2", + "connect-pg-simple": "^10.0.0", + "connect-redis": "^8.0.1", + "connect-sqlite3": "^0.9.15", "content-disposition": "0.5.4", + "cookie-parser": "^1.4.6", "cors": "^2.8.5", "crypto-js": "^4.1.1", + "csv-parser": "^3.0.0", "dotenv": "^16.0.0", "express": "^4.17.3", "express-basic-auth": "^1.2.1", + "express-mysql-session": "^3.0.3", "express-rate-limit": "^6.9.0", + "express-session": "^1.18.1", "flowise-components": "workspace:^", + "flowise-nim-container-manager": "^1.0.11", "flowise-ui": "workspace:^", "global-agent": "^3.0.0", + "gulp": "^4.0.2", + "handlebars": "^4.7.8", "http-errors": "^2.0.0", "http-status-codes": "^2.3.0", + "jsonwebtoken": "^9.0.2", + 
"jwt-decode": "^4.0.0", "langchainhub": "^0.0.11", "lodash": "^4.17.21", "moment": "^2.29.3", @@ -99,8 +123,17 @@ "multer-cloud-storage": "^4.0.0", "multer-s3": "^3.0.1", "mysql2": "^3.11.3", - "flowise-nim-container-manager": "^1.0.11", + "nanoid": "3", + "nodemailer": "^6.9.14", "openai": "^4.96.0", + "passport": "^0.7.0", + "passport-auth0": "^1.4.4", + "passport-cookie": "^1.0.9", + "passport-github": "^1.1.0", + "passport-google-oauth20": "^2.0.0", + "passport-jwt": "^4.0.1", + "passport-local": "^1.0.0", + "passport-openidconnect": "^0.1.2", "pg": "^8.11.1", "posthog-node": "^3.5.0", "prom-client": "^15.1.3", @@ -109,27 +142,41 @@ "s3-streamlogger": "^1.11.0", "sanitize-html": "^2.11.0", "sqlite3": "^5.1.6", + "stripe": "^15.6.0", "turndown": "^7.2.0", "typeorm": "^0.3.6", "uuid": "^9.0.1", - "winston": "^3.9.0" + "winston": "^3.9.0", + "winston-daily-rotate-file": "^5.0.0" }, "devDependencies": { "@types/content-disposition": "0.5.8", + "@types/cookie-parser": "^1.4.7", "@types/cors": "^2.8.12", "@types/crypto-js": "^4.1.1", + "@types/express-session": "^1.18.0", + "@types/jest": "^29.5.14", + "@types/jsonwebtoken": "^9.0.6", "@types/multer": "^1.4.7", "@types/multer-s3": "^3.0.3", + "@types/nodemailer": "^6.4.15", + "@types/passport-auth0": "^1.0.9", + "@types/passport-github": "^1.1.12", + "@types/passport-openidconnect": "^0.1.3", "@types/sanitize-html": "^2.9.5", + "@types/supertest": "^6.0.3", "@types/turndown": "^5.0.5", "concurrently": "^7.1.0", "cypress": "^13.13.0", + "jest": "^29.7.0", "nodemon": "^2.0.22", - "oclif": "^3", + "oclif": "^4.20.5", "rimraf": "^5.0.5", "run-script-os": "^1.1.6", "shx": "^0.3.3", "start-server-and-test": "^2.0.3", + "supertest": "^7.1.0", + "ts-jest": "^29.3.2", "ts-node": "^10.7.0", "tsc-watch": "^6.0.4", "typescript": "^5.4.5" diff --git a/packages/server/src/AppConfig.ts b/packages/server/src/AppConfig.ts index fa3919aae..8a838254e 100644 --- a/packages/server/src/AppConfig.ts +++ b/packages/server/src/AppConfig.ts 
@@ -1,7 +1,4 @@ export const appConfig = { - apiKeys: { - storageType: process.env.APIKEY_STORAGE_TYPE ? process.env.APIKEY_STORAGE_TYPE.toLowerCase() : 'json' - }, showCommunityNodes: process.env.SHOW_COMMUNITY_NODES ? process.env.SHOW_COMMUNITY_NODES.toLowerCase() === 'true' : false // todo: add more config options here like database, log, storage, credential and allow modification from UI } diff --git a/packages/server/src/CachePool.ts b/packages/server/src/CachePool.ts index e978d89de..bacb01a58 100644 --- a/packages/server/src/CachePool.ts +++ b/packages/server/src/CachePool.ts @@ -8,6 +8,8 @@ export class CachePool { private redisClient: Redis | null = null activeLLMCache: IActiveCache = {} activeEmbeddingCache: IActiveCache = {} + activeMCPCache: { [key: string]: any } = {} + ssoTokenCache: { [key: string]: any } = {} constructor() { if (process.env.MODE === MODE.QUEUE) { @@ -41,6 +43,46 @@ export class CachePool { } } + /** + * Add to the sso token cache pool + * @param {string} ssoToken + * @param {any} value + */ + async addSSOTokenCache(ssoToken: string, value: any) { + if (process.env.MODE === MODE.QUEUE) { + if (this.redisClient) { + const serializedValue = JSON.stringify(value) + await this.redisClient.set(`ssoTokenCache:${ssoToken}`, serializedValue, 'EX', 120) + } + } else { + this.ssoTokenCache[ssoToken] = value + } + } + + async getSSOTokenCache(ssoToken: string): Promise { + if (process.env.MODE === MODE.QUEUE) { + if (this.redisClient) { + const serializedValue = await this.redisClient.get(`ssoTokenCache:${ssoToken}`) + if (serializedValue) { + return JSON.parse(serializedValue) + } + } + } else { + return this.ssoTokenCache[ssoToken] + } + return undefined + } + + async deleteSSOTokenCache(ssoToken: string) { + if (process.env.MODE === MODE.QUEUE) { + if (this.redisClient) { + await this.redisClient.del(`ssoTokenCache:${ssoToken}`) + } + } else { + delete this.ssoTokenCache[ssoToken] + } + } + /** * Add to the llm cache pool * @param {string} 
chatflowid @@ -73,6 +115,29 @@ export class CachePool { } } + /** + * Add to the mcp toolkit cache pool + * @param {string} cacheKey + * @param {any} value + */ + async addMCPCache(cacheKey: string, value: any) { + // Only add to cache for non-queue mode, because we are storing the toolkit instances in memory, and we can't store them in redis + if (process.env.MODE !== MODE.QUEUE) { + this.activeMCPCache[`mcpCache:${cacheKey}`] = value + } + } + + /** + * Get item from mcp toolkit cache pool + * @param {string} cacheKey + */ + async getMCPCache(cacheKey: string): Promise { + if (process.env.MODE !== MODE.QUEUE) { + return this.activeMCPCache[`mcpCache:${cacheKey}`] + } + return undefined + } + /** * Get item from llm cache pool * @param {string} chatflowid diff --git a/packages/server/src/DataSource.ts b/packages/server/src/DataSource.ts index 811f62b6a..a2832c30d 100644 --- a/packages/server/src/DataSource.ts +++ b/packages/server/src/DataSource.ts @@ -8,6 +8,7 @@ import { sqliteMigrations } from './database/migrations/sqlite' import { mysqlMigrations } from './database/migrations/mysql' import { mariadbMigrations } from './database/migrations/mariadb' import { postgresMigrations } from './database/migrations/postgres' +import logger from './utils/logger' let appDataSource: DataSource @@ -73,7 +74,17 @@ export const init = async (): Promise => { synchronize: false, migrationsRun: false, entities: Object.values(entities), - migrations: postgresMigrations + migrations: postgresMigrations, + extra: { + idleTimeoutMillis: 120000 + }, + logging: ['error', 'warn', 'info', 'log'], + logger: 'advanced-console', + logNotifications: true, + poolErrorHandler: (err) => { + logger.error(`Database pool error: ${JSON.stringify(err)}`) + }, + applicationName: 'Flowise' }) break default: @@ -97,10 +108,10 @@ export function getDataSource(): DataSource { return appDataSource } -const getDatabaseSSLFromEnv = () => { +export const getDatabaseSSLFromEnv = () => { if 
(process.env.DATABASE_SSL_KEY_BASE64) { return { - rejectUnauthorized: false, + rejectUnauthorized: process.env.DATABASE_REJECT_UNAUTHORIZED === 'true', ca: Buffer.from(process.env.DATABASE_SSL_KEY_BASE64, 'base64') } } else if (process.env.DATABASE_SSL === 'true') { diff --git a/packages/server/src/IdentityManager.ts b/packages/server/src/IdentityManager.ts new file mode 100644 index 000000000..c56903be9 --- /dev/null +++ b/packages/server/src/IdentityManager.ts @@ -0,0 +1,524 @@ +/** + * Copyright (c) 2023-present FlowiseAI, Inc. + * + * The Enterprise and Cloud versions of Flowise are licensed under the [Commercial License](https://github.com/FlowiseAI/Flowise/tree/main/packages/server/src/enterprise/LICENSE.md). + * Unauthorized copying, modification, distribution, or use of the Enterprise and Cloud versions is strictly prohibited without a valid license agreement from FlowiseAI, Inc. + * + * The Open Source version is licensed under the Apache License, Version 2.0 (the "License") + * + * For information about licensing of the Enterprise and Cloud versions, please contact: + * security@flowiseai.com + */ + +import axios from 'axios' +import express, { Application, NextFunction, Request, Response } from 'express' +import * as fs from 'fs' +import { StatusCodes } from 'http-status-codes' +import jwt from 'jsonwebtoken' +import path from 'path' +import { LoginMethodStatus } from './enterprise/database/entities/login-method.entity' +import { ErrorMessage, LoggedInUser } from './enterprise/Interface.Enterprise' +import { Permissions } from './enterprise/rbac/Permissions' +import { LoginMethodService } from './enterprise/services/login-method.service' +import { OrganizationService } from './enterprise/services/organization.service' +import Auth0SSO from './enterprise/sso/Auth0SSO' +import AzureSSO from './enterprise/sso/AzureSSO' +import GithubSSO from './enterprise/sso/GithubSSO' +import GoogleSSO from './enterprise/sso/GoogleSSO' +import SSOBase from 
'./enterprise/sso/SSOBase' +import { InternalFlowiseError } from './errors/internalFlowiseError' +import { Platform, UserPlan } from './Interface' +import { StripeManager } from './StripeManager' +import { UsageCacheManager } from './UsageCacheManager' +import { GeneralErrorMessage, LICENSE_QUOTAS } from './utils/constants' +import { getRunningExpressApp } from './utils/getRunningExpressApp' +import { ENTERPRISE_FEATURE_FLAGS } from './utils/quotaUsage' +import Stripe from 'stripe' + +const allSSOProviders = ['azure', 'google', 'auth0', 'github'] +export class IdentityManager { + private static instance: IdentityManager + private stripeManager?: StripeManager + licenseValid: boolean = false + permissions: Permissions + ssoProviderName: string = '' + currentInstancePlatform: Platform = Platform.OPEN_SOURCE + // create a map to store the sso provider name and the sso provider instance + ssoProviders: Map = new Map() + + public static async getInstance(): Promise { + if (!IdentityManager.instance) { + IdentityManager.instance = new IdentityManager() + await IdentityManager.instance.initialize() + } + return IdentityManager.instance + } + + public async initialize() { + await this._validateLicenseKey() + this.permissions = new Permissions() + if (process.env.STRIPE_SECRET_KEY) { + this.stripeManager = await StripeManager.getInstance() + } + } + + public getPlatformType = () => { + return this.currentInstancePlatform + } + + public getPermissions = () => { + return this.permissions + } + + public isEnterprise = () => { + return this.currentInstancePlatform === Platform.ENTERPRISE + } + + public isCloud = () => { + return this.currentInstancePlatform === Platform.CLOUD + } + + public isOpenSource = () => { + return this.currentInstancePlatform === Platform.OPEN_SOURCE + } + + public isLicenseValid = () => { + return this.licenseValid + } + + private _offlineVerifyLicense(licenseKey: string): any { + try { + const publicKey = fs.readFileSync(path.join(__dirname, '../', 
'src/enterprise/license/public.pem'), 'utf8') + const decoded = jwt.verify(licenseKey, publicKey, { + algorithms: ['RS256'] + }) + return decoded + } catch (error) { + console.error('Error verifying license key:', error) + return null + } + } + + private _validateLicenseKey = async () => { + const LICENSE_URL = process.env.LICENSE_URL + const FLOWISE_EE_LICENSE_KEY = process.env.FLOWISE_EE_LICENSE_KEY + + // First check if license key is missing + if (!FLOWISE_EE_LICENSE_KEY) { + this.licenseValid = false + this.currentInstancePlatform = Platform.OPEN_SOURCE + return + } + + try { + if (process.env.OFFLINE === 'true') { + const decodedLicense = this._offlineVerifyLicense(FLOWISE_EE_LICENSE_KEY) + + if (!decodedLicense) { + this.licenseValid = false + } else { + const issuedAtSeconds = decodedLicense.iat + if (!issuedAtSeconds) { + this.licenseValid = false + } else { + const issuedAt = new Date(issuedAtSeconds * 1000) + const expiryDurationInMonths = decodedLicense.expiryDurationInMonths || 0 + + const expiryDate = new Date(issuedAt) + expiryDate.setMonth(expiryDate.getMonth() + expiryDurationInMonths) + + if (new Date() > expiryDate) { + this.licenseValid = false + } else { + this.licenseValid = true + } + } + } + this.currentInstancePlatform = Platform.ENTERPRISE + } else if (LICENSE_URL) { + try { + const response = await axios.post(`${LICENSE_URL}/enterprise/verify`, { license: FLOWISE_EE_LICENSE_KEY }) + this.licenseValid = response.data?.valid + + if (!LICENSE_URL.includes('api')) this.currentInstancePlatform = Platform.ENTERPRISE + else if (LICENSE_URL.includes('v1')) this.currentInstancePlatform = Platform.ENTERPRISE + else if (LICENSE_URL.includes('v2')) this.currentInstancePlatform = response.data?.platform + else throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, GeneralErrorMessage.UNHANDLED_EDGE_CASE) + } catch (error) { + console.error('Error verifying license key:', error) + this.licenseValid = false + this.currentInstancePlatform = 
Platform.ENTERPRISE + return + } + } + } catch (error) { + this.licenseValid = false + } + } + + public initializeSSO = async (app: express.Application) => { + if (this.getPlatformType() === Platform.CLOUD || this.getPlatformType() === Platform.ENTERPRISE) { + const loginMethodService = new LoginMethodService() + let queryRunner + try { + queryRunner = getRunningExpressApp().AppDataSource.createQueryRunner() + await queryRunner.connect() + let organizationId = undefined + if (this.getPlatformType() === Platform.ENTERPRISE) { + const organizationService = new OrganizationService() + const organizations = await organizationService.readOrganization(queryRunner) + if (organizations.length > 0) { + organizationId = organizations[0].id + } else { + this.initializeEmptySSO(app) + return + } + } + const loginMethods = await loginMethodService.readLoginMethodByOrganizationId(organizationId, queryRunner) + if (loginMethods && loginMethods.length > 0) { + for (let method of loginMethods) { + if (method.status === LoginMethodStatus.ENABLE) { + method.config = JSON.parse(await loginMethodService.decryptLoginMethodConfig(method.config)) + this.initializeSsoProvider(app, method.name, method.config) + } + } + } + } finally { + if (queryRunner) await queryRunner.release() + } + } + // iterate through the remaining providers and initialize them with configEnabled as false + this.initializeEmptySSO(app) + } + + initializeEmptySSO(app: Application) { + allSSOProviders.map((providerName) => { + if (!this.ssoProviders.has(providerName)) { + this.initializeSsoProvider(app, providerName, undefined) + } + }) + } + + initializeSsoProvider(app: Application, providerName: string, providerConfig: any) { + if (this.ssoProviders.has(providerName)) { + const provider = this.ssoProviders.get(providerName) + if (provider) { + if (providerConfig && providerConfig.configEnabled === true) { + provider.setSSOConfig(providerConfig) + provider.initialize() + } else { + // if false, disable the provider + 
provider.setSSOConfig(undefined) + } + } + } else { + switch (providerName) { + case 'azure': { + const azureSSO = new AzureSSO(app, providerConfig) + azureSSO.initialize() + this.ssoProviders.set(providerName, azureSSO) + break + } + case 'google': { + const googleSSO = new GoogleSSO(app, providerConfig) + googleSSO.initialize() + this.ssoProviders.set(providerName, googleSSO) + break + } + case 'auth0': { + const auth0SSO = new Auth0SSO(app, providerConfig) + auth0SSO.initialize() + this.ssoProviders.set(providerName, auth0SSO) + break + } + case 'github': { + const githubSSO = new GithubSSO(app, providerConfig) + githubSSO.initialize() + this.ssoProviders.set(providerName, githubSSO) + break + } + default: + throw new Error(`SSO Provider ${providerName} not found`) + } + } + } + + async getRefreshToken(providerName: any, ssoRefreshToken: string) { + if (!this.ssoProviders.has(providerName)) { + throw new Error(`SSO Provider ${providerName} not found`) + } + return await (this.ssoProviders.get(providerName) as SSOBase).refreshToken(ssoRefreshToken) + } + + public async getProductIdFromSubscription(subscriptionId: string) { + if (!subscriptionId) return '' + if (!this.stripeManager) { + throw new Error('Stripe manager is not initialized') + } + return await this.stripeManager.getProductIdFromSubscription(subscriptionId) + } + + public async getFeaturesByPlan(subscriptionId: string, withoutCache: boolean = false) { + if (this.isEnterprise()) { + const features: Record = {} + for (const feature of ENTERPRISE_FEATURE_FLAGS) { + features[feature] = 'true' + } + return features + } else if (this.isCloud()) { + if (!this.stripeManager || !subscriptionId) { + return {} + } + return await this.stripeManager.getFeaturesByPlan(subscriptionId, withoutCache) + } + return {} + } + + public static checkFeatureByPlan(feature: string) { + return (req: Request, res: Response, next: NextFunction) => { + const user = req.user + if (user) { + if (!user.features || 
Object.keys(user.features).length === 0) { + return res.status(403).json({ message: ErrorMessage.FORBIDDEN }) + } + if (Object.keys(user.features).includes(feature) && user.features[feature] === 'true') { + return next() + } + } + return res.status(403).json({ message: ErrorMessage.FORBIDDEN }) + } + } + + public async createStripeCustomerPortalSession(req: Request) { + if (!this.stripeManager) { + throw new Error('Stripe manager is not initialized') + } + return await this.stripeManager.createStripeCustomerPortalSession(req) + } + + public async getAdditionalSeatsQuantity(subscriptionId: string) { + if (!subscriptionId) return {} + if (!this.stripeManager) { + throw new Error('Stripe manager is not initialized') + } + return await this.stripeManager.getAdditionalSeatsQuantity(subscriptionId) + } + + public async getCustomerWithDefaultSource(customerId: string) { + if (!customerId) return + if (!this.stripeManager) { + throw new Error('Stripe manager is not initialized') + } + return await this.stripeManager.getCustomerWithDefaultSource(customerId) + } + + public async getAdditionalSeatsProration(subscriptionId: string, newQuantity: number) { + if (!subscriptionId) return {} + if (!this.stripeManager) { + throw new Error('Stripe manager is not initialized') + } + return await this.stripeManager.getAdditionalSeatsProration(subscriptionId, newQuantity) + } + + public async updateAdditionalSeats(subscriptionId: string, quantity: number, prorationDate: number) { + if (!subscriptionId) return {} + + if (!this.stripeManager) { + throw new Error('Stripe manager is not initialized') + } + const { success, subscription, invoice } = await this.stripeManager.updateAdditionalSeats(subscriptionId, quantity, prorationDate) + + // Fetch product details to get quotas + const items = subscription.items.data + if (items.length === 0) { + throw new Error('No subscription items found') + } + + const productId = items[0].price.product as string + const product = await 
this.stripeManager.getStripe().products.retrieve(productId) + const productMetadata = product.metadata + + // Extract quotas from metadata + const quotas: Record = {} + for (const key in productMetadata) { + if (key.startsWith('quota:')) { + quotas[key] = parseInt(productMetadata[key]) + } + } + quotas[LICENSE_QUOTAS.ADDITIONAL_SEATS_LIMIT] = quantity + + // Get features from Stripe + const features = await this.getFeaturesByPlan(subscription.id, true) + + // Update the cache with new subscription data including quotas + const cacheManager = await UsageCacheManager.getInstance() + await cacheManager.updateSubscriptionDataToCache(subscriptionId, { + features, + quotas, + subsriptionDetails: this.stripeManager.getSubscriptionObject(subscription) + }) + + return { success, subscription, invoice } + } + + public async getPlanProration(subscriptionId: string, newPlanId: string) { + if (!subscriptionId || !newPlanId) return {} + + if (!this.stripeManager) { + throw new Error('Stripe manager is not initialized') + } + return await this.stripeManager.getPlanProration(subscriptionId, newPlanId) + } + + public async updateSubscriptionPlan(req: Request, subscriptionId: string, newPlanId: string, prorationDate: number) { + if (!subscriptionId || !newPlanId) return {} + + if (!this.stripeManager) { + throw new Error('Stripe manager is not initialized') + } + if (!req.user) { + throw new InternalFlowiseError(StatusCodes.UNAUTHORIZED, GeneralErrorMessage.UNAUTHORIZED) + } + const { success, subscription } = await this.stripeManager.updateSubscriptionPlan(subscriptionId, newPlanId, prorationDate) + if (success) { + // Fetch product details to get quotas + const product = await this.stripeManager.getStripe().products.retrieve(newPlanId) + const productMetadata = product.metadata + + // Extract quotas from metadata + const quotas: Record = {} + for (const key in productMetadata) { + if (key.startsWith('quota:')) { + quotas[key] = parseInt(productMetadata[key]) + } + } + + const 
additionalSeatsItem = subscription.items.data.find( + (item) => (item.price.product as string) === process.env.ADDITIONAL_SEAT_ID + ) + quotas[LICENSE_QUOTAS.ADDITIONAL_SEATS_LIMIT] = additionalSeatsItem?.quantity || 0 + + // Get features from Stripe + const features = await this.getFeaturesByPlan(subscription.id, true) + + // Update the cache with new subscription data including quotas + const cacheManager = await UsageCacheManager.getInstance() + + const updateCacheData: Record = { + features, + quotas, + subsriptionDetails: this.stripeManager.getSubscriptionObject(subscription) + } + + if ( + newPlanId === process.env.CLOUD_FREE_ID || + newPlanId === process.env.CLOUD_STARTER_ID || + newPlanId === process.env.CLOUD_PRO_ID + ) { + updateCacheData.productId = newPlanId + } + + await cacheManager.updateSubscriptionDataToCache(subscriptionId, updateCacheData) + + const loggedInUser: LoggedInUser = { + ...req.user, + activeOrganizationSubscriptionId: subscription.id, + features + } + + if ( + newPlanId === process.env.CLOUD_FREE_ID || + newPlanId === process.env.CLOUD_STARTER_ID || + newPlanId === process.env.CLOUD_PRO_ID + ) { + loggedInUser.activeOrganizationProductId = newPlanId + } + + req.user = { + ...req.user, + ...loggedInUser + } + + // Update passport session + // @ts-ignore + req.session.passport.user = { + ...req.user, + ...loggedInUser + } + + req.session.save((err) => { + if (err) throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, GeneralErrorMessage.UNHANDLED_EDGE_CASE) + }) + + return { + status: 'success', + user: loggedInUser + } + } + return { + status: 'error', + message: 'Payment or subscription update not completed' + } + } + + public async createStripeUserAndSubscribe({ email, userPlan, referral }: { email: string; userPlan: UserPlan; referral?: string }) { + if (!this.stripeManager) { + throw new Error('Stripe manager is not initialized') + } + + try { + // Create a customer in Stripe + let customer: Stripe.Response + if (referral) { + 
customer = await this.stripeManager.getStripe().customers.create({ + email: email, + metadata: { + referral + } + }) + } else { + customer = await this.stripeManager.getStripe().customers.create({ + email: email + }) + } + + let productId = '' + switch (userPlan) { + case UserPlan.STARTER: + productId = process.env.CLOUD_STARTER_ID as string + break + case UserPlan.PRO: + productId = process.env.CLOUD_PRO_ID as string + break + case UserPlan.FREE: + productId = process.env.CLOUD_FREE_ID as string + break + } + + // Get the default price ID for the product + const prices = await this.stripeManager.getStripe().prices.list({ + product: productId, + active: true, + limit: 1 + }) + + if (!prices.data.length) { + throw new Error('No active price found for the product') + } + + // Create the subscription + const subscription = await this.stripeManager.getStripe().subscriptions.create({ + customer: customer.id, + items: [{ price: prices.data[0].id }] + }) + + return { + customerId: customer.id, + subscriptionId: subscription.id + } + } catch (error) { + console.error('Error creating Stripe user and subscription:', error) + throw error + } + } +} diff --git a/packages/server/src/Interface.DocumentStore.ts b/packages/server/src/Interface.DocumentStore.ts index 34d92978f..a7f84f92e 100644 --- a/packages/server/src/Interface.DocumentStore.ts +++ b/packages/server/src/Interface.DocumentStore.ts @@ -4,6 +4,7 @@ import { DataSource } from 'typeorm' import { IComponentNodes } from './Interface' import { Telemetry } from './utils/telemetry' import { CachePool } from './CachePool' +import { UsageCacheManager } from './UsageCacheManager' export enum DocumentStoreStatus { EMPTY_SYNC = 'EMPTY', @@ -27,6 +28,7 @@ export interface IDocumentStore { vectorStoreConfig: string | null // JSON string embeddingConfig: string | null // JSON string recordManagerConfig: string | null // JSON string + workspaceId?: string } export interface IDocumentStoreFileChunk { @@ -47,6 +49,7 @@ export 
interface IDocumentStoreFileChunkPagedResponse { storeName: string description: string docId: string + workspaceId?: string } export interface IDocumentStoreLoader { @@ -78,6 +81,7 @@ export interface IDocumentStoreUpsertData { replaceExisting?: boolean createNewDocStore?: boolean docStore?: IDocumentStore + loaderName?: string loader?: { name: string config: ICommonObject @@ -119,9 +123,13 @@ export interface IDocumentStoreWhereUsed { } export interface IUpsertQueueAppServer { + orgId: string + workspaceId: string + subscriptionId: string appDataSource: DataSource componentNodes: IComponentNodes telemetry: Telemetry + usageCacheManager: UsageCacheManager cachePool?: CachePool } @@ -231,6 +239,7 @@ export class DocumentStoreDTO { totalChunks: number totalChars: number chunkSize: number + workspaceId?: string loaders: IDocumentStoreLoader[] vectorStoreConfig: any embeddingConfig: any @@ -246,6 +255,7 @@ export class DocumentStoreDTO { documentStoreDTO.name = entity.name documentStoreDTO.description = entity.description documentStoreDTO.status = entity.status + documentStoreDTO.workspaceId = entity.workspaceId documentStoreDTO.totalChars = 0 documentStoreDTO.totalChunks = 0 @@ -281,14 +291,17 @@ export class DocumentStoreDTO { } static fromEntities(entities: DocumentStore[]): DocumentStoreDTO[] { + if (entities.length === 0) { + return [] + } return entities.map((entity) => this.fromEntity(entity)) } static toEntity(body: any): DocumentStore { const docStore = new DocumentStore() Object.assign(docStore, body) - docStore.loaders = '[]' - docStore.whereUsed = '[]' + docStore.loaders = body.loaders ?? '[]' + docStore.whereUsed = body.whereUsed ?? 
'[]' // when a new document store is created, it is empty and in sync docStore.status = DocumentStoreStatus.EMPTY_SYNC return docStore diff --git a/packages/server/src/Interface.Evaluation.ts b/packages/server/src/Interface.Evaluation.ts new file mode 100644 index 000000000..ad0015137 --- /dev/null +++ b/packages/server/src/Interface.Evaluation.ts @@ -0,0 +1,139 @@ +// Evaluation Related Interfaces +import { Evaluator } from './database/entities/Evaluator' + +export interface IDataset { + id: string + name: string + description: string + createdDate: Date + updatedDate: Date + workspaceId?: string +} +export interface IDatasetRow { + id: string + datasetId: string + input: string + output: string + updatedDate: Date + sequenceNo: number +} + +export enum EvaluationStatus { + PENDING = 'pending', + COMPLETED = 'completed', + ERROR = 'error' +} + +export interface IEvaluation { + id: string + name: string + chatflowId: string + chatflowName: string + datasetId: string + datasetName: string + evaluationType: string + additionalConfig: string //json + average_metrics: string //json + status: string + runDate: Date + workspaceId?: string +} + +export interface IEvaluationResult extends IEvaluation { + latestEval: boolean + version: number +} + +export interface IEvaluationRun { + id: string + evaluationId: string + input: string + expectedOutput: string + actualOutput: string // JSON + metrics: string // JSON + runDate: Date + llmEvaluators?: string // JSON + evaluators?: string // JSON + errors?: string // JSON +} + +export interface IEvaluator { + id: string + name: string + type: string + config: string // JSON + updatedDate: Date + createdDate: Date + workspaceId?: string +} + +export class EvaluatorDTO { + id: string + name: string + type: string + measure?: string + operator?: string + value?: string + prompt?: string + evaluatorType?: string + outputSchema?: [] + updatedDate: Date + createdDate: Date + + static toEntity(body: any): Evaluator { + const newDs = new 
Evaluator() + Object.assign(newDs, body) + let config: any = {} + if (body.type === 'llm') { + config = { + prompt: body.prompt, + outputSchema: body.outputSchema + } + } else if (body.type === 'text') { + config = { + operator: body.operator, + value: body.value + } + } else if (body.type === 'json') { + config = { + operator: body.operator + } + } else if (body.type === 'numeric') { + config = { + operator: body.operator, + value: body.value, + measure: body.measure + } + } else { + throw new Error('Invalid evaluator type') + } + newDs.config = JSON.stringify(config) + return newDs + } + + static fromEntity(entity: Evaluator): EvaluatorDTO { + const newDs = new EvaluatorDTO() + Object.assign(newDs, entity) + const config = JSON.parse(entity.config) + if (entity.type === 'llm') { + newDs.prompt = config.prompt + newDs.outputSchema = config.outputSchema + } else if (entity.type === 'text') { + newDs.operator = config.operator + newDs.value = config.value + } else if (entity.type === 'json') { + newDs.operator = config.operator + newDs.value = config.value + } else if (entity.type === 'numeric') { + newDs.operator = config.operator + newDs.value = config.value + newDs.measure = config.measure + } + delete (newDs as any).config + return newDs + } + + static fromEntities(entities: Evaluator[]): EvaluatorDTO[] { + return entities.map((entity) => this.fromEntity(entity)) + } +} diff --git a/packages/server/src/Interface.ts b/packages/server/src/Interface.ts index 7865727ce..f1c603043 100644 --- a/packages/server/src/Interface.ts +++ b/packages/server/src/Interface.ts @@ -12,6 +12,7 @@ import { import { DataSource } from 'typeorm' import { CachePool } from './CachePool' import { Telemetry } from './utils/telemetry' +import { UsageCacheManager } from './UsageCacheManager' export type MessageType = 'apiMessage' | 'userMessage' @@ -28,13 +29,27 @@ export enum MODE { export enum ChatType { INTERNAL = 'INTERNAL', - EXTERNAL = 'EXTERNAL' + EXTERNAL = 'EXTERNAL', + EVALUATION = 
'EVALUATION' } export enum ChatMessageRatingType { THUMBS_UP = 'THUMBS_UP', THUMBS_DOWN = 'THUMBS_DOWN' } + +export enum Platform { + OPEN_SOURCE = 'open source', + CLOUD = 'cloud', + ENTERPRISE = 'enterprise' +} + +export enum UserPlan { + STARTER = 'STARTER', + PRO = 'PRO', + FREE = 'FREE' +} + /** * Databases */ @@ -49,11 +64,13 @@ export interface IChatFlow { apikeyid?: string analytic?: string speechToText?: string + textToSpeech?: string chatbotConfig?: string followUpPrompts?: string apiConfig?: string category?: string type?: ChatflowType + workspaceId: string } export interface IChatMessage { @@ -98,6 +115,7 @@ export interface ITool { func?: string updatedDate: Date createdDate: Date + workspaceId: string } export interface IAssistant { @@ -107,6 +125,7 @@ export interface IAssistant { iconSrc?: string updatedDate: Date createdDate: Date + workspaceId: string } export interface ICredential { @@ -116,6 +135,7 @@ export interface ICredential { encryptedData: string updatedDate: Date createdDate: Date + workspaceId: string } export interface IVariable { @@ -125,6 +145,7 @@ export interface IVariable { type: string updatedDate: Date createdDate: Date + workspaceId: string } export interface ILead { @@ -156,6 +177,7 @@ export interface IExecution { createdDate: Date updatedDate: Date stoppedDate: Date + workspaceId: string } export interface IComponentNodes { @@ -301,7 +323,7 @@ export interface IOverrideConfig { label: string name: string type: string - schema?: ICommonObject[] + schema?: ICommonObject[] | Record } export type ICredentialDataDecrypted = ICommonObject @@ -311,6 +333,7 @@ export interface ICredentialReqBody { name: string credentialName: string plainDataObj: ICredentialDataDecrypted + workspaceId: string } // Decrypted credential object sent back to client @@ -329,6 +352,7 @@ export interface IApiKey { apiKey: string apiSecret: string updatedDate: Date + workspaceId: string } export interface ICustomTemplate { @@ -342,10 +366,12 @@ export 
interface ICustomTemplate { badge?: string framework?: string usecases?: string + workspaceId: string } export interface IFlowConfig { chatflowid: string + chatflowId: string chatId: string sessionId: string chatHistory: IMessage[] @@ -361,14 +387,21 @@ export interface IPredictionQueueAppServer { sseStreamer: IServerSideEventStreamer telemetry: Telemetry cachePool: CachePool + usageCacheManager: UsageCacheManager } export interface IExecuteFlowParams extends IPredictionQueueAppServer { incomingInput: IncomingInput chatflow: IChatFlow chatId: string + orgId: string + workspaceId: string + subscriptionId: string + productId: string baseURL: string isInternal: boolean + isEvaluation?: boolean + evaluationRunId?: string signal?: AbortController files?: Express.Multer.File[] fileUploads?: IFileUpload[] @@ -398,3 +431,6 @@ export interface IVariableOverride { // DocumentStore related export * from './Interface.DocumentStore' + +// Evaluations related +export * from './Interface.Evaluation' diff --git a/packages/server/src/StripeManager.ts b/packages/server/src/StripeManager.ts new file mode 100644 index 000000000..806349d97 --- /dev/null +++ b/packages/server/src/StripeManager.ts @@ -0,0 +1,609 @@ +import Stripe from 'stripe' +import { Request } from 'express' +import { UsageCacheManager } from './UsageCacheManager' +import { UserPlan } from './Interface' +import { LICENSE_QUOTAS } from './utils/constants' + +export class StripeManager { + private static instance: StripeManager + private stripe?: Stripe + private cacheManager: UsageCacheManager + + public static async getInstance(): Promise { + if (!StripeManager.instance) { + StripeManager.instance = new StripeManager() + await StripeManager.instance.initialize() + } + return StripeManager.instance + } + + private async initialize() { + if (!this.stripe && process.env.STRIPE_SECRET_KEY) { + this.stripe = new Stripe(process.env.STRIPE_SECRET_KEY) + } + this.cacheManager = await UsageCacheManager.getInstance() + } + + 
public getStripe() { + if (!this.stripe) throw new Error('Stripe is not initialized') + return this.stripe + } + + public getSubscriptionObject(subscription: Stripe.Response) { + return { + customer: subscription.customer, + status: subscription.status, + created: subscription.created + } + } + + public async getProductIdFromSubscription(subscriptionId: string) { + if (!subscriptionId || subscriptionId.trim() === '') { + return '' + } + + if (!this.stripe) { + throw new Error('Stripe is not initialized') + } + + const subscriptionData = await this.cacheManager.getSubscriptionDataFromCache(subscriptionId) + if (subscriptionData?.productId) { + return subscriptionData.productId + } + + try { + const subscription = await this.stripe.subscriptions.retrieve(subscriptionId) + const items = subscription.items.data + if (items.length === 0) { + return '' + } + + const productId = items[0].price.product as string + await this.cacheManager.updateSubscriptionDataToCache(subscriptionId, { + productId, + subsriptionDetails: this.getSubscriptionObject(subscription) + }) + + return productId + } catch (error) { + return '' + } + } + + public async getFeaturesByPlan(subscriptionId: string, withoutCache: boolean = false) { + if (!this.stripe || !subscriptionId) { + return {} + } + + if (!withoutCache) { + const subscriptionData = await this.cacheManager.getSubscriptionDataFromCache(subscriptionId) + if (subscriptionData?.features) { + return subscriptionData.features + } + } + + const subscription = await this.stripe.subscriptions.retrieve(subscriptionId, { + timeout: 5000 + }) + const items = subscription.items.data + if (items.length === 0) { + return {} + } + + const productId = items[0].price.product as string + const product = await this.stripe.products.retrieve(productId, { + timeout: 5000 + }) + const productMetadata = product.metadata + + if (!productMetadata || Object.keys(productMetadata).length === 0) { + return {} + } + + const features: Record = {} + for (const key in 
productMetadata) { + if (key.startsWith('feat:')) { + features[key] = productMetadata[key] + } + } + + await this.cacheManager.updateSubscriptionDataToCache(subscriptionId, { + features, + subsriptionDetails: this.getSubscriptionObject(subscription) + }) + + return features + } + + public async createStripeCustomerPortalSession(req: Request) { + if (!this.stripe) { + throw new Error('Stripe is not initialized') + } + + const customerId = req.user?.activeOrganizationCustomerId + if (!customerId) { + throw new Error('Customer ID is required') + } + + const subscriptionId = req.user?.activeOrganizationSubscriptionId + if (!subscriptionId) { + throw new Error('Subscription ID is required') + } + + try { + const prodPriceIds = await this.getPriceIds() + const configuration = await this.createPortalConfiguration(prodPriceIds) + + const portalSession = await this.stripe.billingPortal.sessions.create({ + customer: customerId, + configuration: configuration.id, + return_url: `${process.env.APP_URL}/account` + /* We can't have flow_data because it does not support multiple subscription items + flow_data: { + type: 'subscription_update', + subscription_update: { + subscription: subscriptionId + }, + after_completion: { + type: 'redirect', + redirect: { + return_url: `${process.env.APP_URL}/account/subscription?subscriptionId=${subscriptionId}` + } + } + }*/ + }) + + return { url: portalSession.url } + } catch (error) { + console.error('Error creating customer portal session:', error) + throw error + } + } + + private async getPriceIds() { + const prodPriceIds: Record = { + [UserPlan.STARTER]: { + product: process.env.CLOUD_STARTER_ID as string, + price: '' + }, + [UserPlan.PRO]: { + product: process.env.CLOUD_PRO_ID as string, + price: '' + }, + [UserPlan.FREE]: { + product: process.env.CLOUD_FREE_ID as string, + price: '' + }, + SEAT: { + product: process.env.ADDITIONAL_SEAT_ID as string, + price: '' + } + } + + for (const key in prodPriceIds) { + const prices = await 
this.stripe!.prices.list({ + product: prodPriceIds[key].product, + active: true, + limit: 1 + }) + + if (prices.data.length) { + prodPriceIds[key].price = prices.data[0].id + } + } + + return prodPriceIds + } + + private async createPortalConfiguration(_: Record) { + return await this.stripe!.billingPortal.configurations.create({ + business_profile: { + privacy_policy_url: `${process.env.APP_URL}/privacy-policy`, + terms_of_service_url: `${process.env.APP_URL}/terms-of-service` + }, + features: { + invoice_history: { + enabled: true + }, + payment_method_update: { + enabled: true + }, + subscription_cancel: { + enabled: false + } + /*subscription_update: { + enabled: false, + default_allowed_updates: ['price'], + products: [ + { + product: prodPriceIds[UserPlan.FREE].product, + prices: [prodPriceIds[UserPlan.FREE].price] + }, + { + product: prodPriceIds[UserPlan.STARTER].product, + prices: [prodPriceIds[UserPlan.STARTER].price] + }, + { + product: prodPriceIds[UserPlan.PRO].product, + prices: [prodPriceIds[UserPlan.PRO].price] + } + ], + proration_behavior: 'always_invoice' + }*/ + } + }) + } + + public async getAdditionalSeatsQuantity(subscriptionId: string): Promise<{ quantity: number; includedSeats: number }> { + if (!this.stripe) { + throw new Error('Stripe is not initialized') + } + + try { + const subscription = await this.stripe.subscriptions.retrieve(subscriptionId) + const additionalSeatsItem = subscription.items.data.find( + (item) => (item.price.product as string) === process.env.ADDITIONAL_SEAT_ID + ) + const quotas = await this.cacheManager.getQuotas(subscriptionId) + + return { quantity: additionalSeatsItem?.quantity || 0, includedSeats: quotas[LICENSE_QUOTAS.USERS_LIMIT] } + } catch (error) { + console.error('Error getting additional seats quantity:', error) + throw error + } + } + + public async getCustomerWithDefaultSource(customerId: string) { + if (!this.stripe) { + throw new Error('Stripe is not initialized') + } + + try { + const customer = 
(await this.stripe.customers.retrieve(customerId, { + expand: ['default_source', 'invoice_settings.default_payment_method'] + })) as Stripe.Customer + + return customer + } catch (error) { + console.error('Error retrieving customer with default source:', error) + throw error + } + } + + public async getAdditionalSeatsProration(subscriptionId: string, quantity: number) { + if (!this.stripe) { + throw new Error('Stripe is not initialized') + } + + try { + const subscription = await this.stripe.subscriptions.retrieve(subscriptionId) + + // Get customer's credit balance + const customer = await this.stripe.customers.retrieve(subscription.customer as string) + const creditBalance = (customer as Stripe.Customer).balance // Balance is in cents, negative for credit, positive for amount owed + + // Get the current subscription's base price (without seats) + const basePlanItem = subscription.items.data.find((item) => (item.price.product as string) !== process.env.ADDITIONAL_SEAT_ID) + const basePlanAmount = basePlanItem ? basePlanItem.price.unit_amount! 
* 1 : 0 + + const existingInvoice = await this.stripe.invoices.retrieveUpcoming({ + customer: subscription.customer as string, + subscription: subscriptionId + }) + + const existingInvoiceTotal = existingInvoice.total + + // Get the price ID for additional seats + const prices = await this.stripe.prices.list({ + product: process.env.ADDITIONAL_SEAT_ID, + active: true, + limit: 1 + }) + + if (prices.data.length === 0) { + throw new Error('No active price found for additional seats') + } + + const seatPrice = prices.data[0] + const pricePerSeat = seatPrice.unit_amount || 0 + + // Use current timestamp for proration calculation + const prorationDate = Math.floor(Date.now() / 1000) + + const additionalSeatsItem = subscription.items.data.find( + (item) => (item.price.product as string) === process.env.ADDITIONAL_SEAT_ID + ) + + const upcomingInvoice = await this.stripe.invoices.retrieveUpcoming({ + customer: subscription.customer as string, + subscription: subscriptionId, + subscription_details: { + proration_behavior: 'always_invoice', + proration_date: prorationDate, + items: [ + additionalSeatsItem + ? 
{ + id: additionalSeatsItem.id, + quantity: quantity + } + : { + // If the item doesn't exist yet, create a new one + // This will be used to calculate the proration amount + price: prices.data[0].id, + quantity: quantity + } + ] + } + }) + + // Calculate proration amount from the relevant line items + // Only consider prorations that match our proration date + const prorationLineItems = upcomingInvoice.lines.data.filter( + (line) => line.type === 'invoiceitem' && line.period.start === prorationDate + ) + + const prorationAmount = prorationLineItems.reduce((total, item) => total + item.amount, 0) + + return { + basePlanAmount: basePlanAmount / 100, + additionalSeatsProratedAmount: (existingInvoiceTotal + prorationAmount - basePlanAmount) / 100, + seatPerUnitPrice: pricePerSeat / 100, + prorationAmount: prorationAmount / 100, + creditBalance: creditBalance / 100, + nextInvoiceTotal: (existingInvoiceTotal + prorationAmount) / 100, + currency: upcomingInvoice.currency.toUpperCase(), + prorationDate, + currentPeriodStart: subscription.current_period_start, + currentPeriodEnd: subscription.current_period_end + } + } catch (error) { + console.error('Error calculating additional seats proration:', error) + throw error + } + } + + public async updateAdditionalSeats(subscriptionId: string, quantity: number, prorationDate: number) { + if (!this.stripe) { + throw new Error('Stripe is not initialized') + } + + try { + const subscription = await this.stripe.subscriptions.retrieve(subscriptionId) + const additionalSeatsItem = subscription.items.data.find( + (item) => (item.price.product as string) === process.env.ADDITIONAL_SEAT_ID + ) + + // Get the price ID for additional seats if needed + const prices = await this.stripe.prices.list({ + product: process.env.ADDITIONAL_SEAT_ID, + active: true, + limit: 1 + }) + + if (prices.data.length === 0) { + throw new Error('No active price found for additional seats') + } + + // Create an invoice immediately for the proration + const 
updatedSubscription = await this.stripe.subscriptions.update(subscriptionId, { + items: [ + additionalSeatsItem + ? { + id: additionalSeatsItem.id, + quantity: quantity + } + : { + price: prices.data[0].id, + quantity: quantity + } + ], + proration_behavior: 'always_invoice', + proration_date: prorationDate + }) + + // Get the latest invoice for this subscription + const invoice = await this.stripe.invoices.list({ + subscription: subscriptionId, + limit: 1 + }) + + if (invoice.data.length > 0) { + const latestInvoice = invoice.data[0] + // Only try to pay if the invoice is not already paid + if (latestInvoice.status !== 'paid') { + await this.stripe.invoices.pay(latestInvoice.id) + } + } + + return { + success: true, + subscription: updatedSubscription, + invoice: invoice.data[0] + } + } catch (error) { + console.error('Error updating additional seats:', error) + throw error + } + } + + public async getPlanProration(subscriptionId: string, newPlanId: string) { + if (!this.stripe) { + throw new Error('Stripe is not initialized') + } + + try { + const subscription = await this.stripe.subscriptions.retrieve(subscriptionId) + const customerId = subscription.customer as string + + // Get customer's credit balance and metadata + const customer = await this.stripe.customers.retrieve(customerId) + const creditBalance = (customer as Stripe.Customer).balance + const customerMetadata = (customer as Stripe.Customer).metadata || {} + + // Get the price ID for the new plan + const prices = await this.stripe.prices.list({ + product: newPlanId, + active: true, + limit: 1 + }) + + if (prices.data.length === 0) { + throw new Error('No active price found for the selected plan') + } + + const newPlan = prices.data[0] + const newPlanPrice = newPlan.unit_amount || 0 + + // Check if this is the STARTER plan and eligible for first month free + const isStarterPlan = newPlanId === process.env.CLOUD_STARTER_ID + const hasUsedFirstMonthFreeCoupon = customerMetadata.has_used_first_month_free 
=== 'true' + const eligibleForFirstMonthFree = isStarterPlan && !hasUsedFirstMonthFreeCoupon + + // Use current timestamp for proration calculation + const prorationDate = Math.floor(Date.now() / 1000) + + const upcomingInvoice = await this.stripe.invoices.retrieveUpcoming({ + customer: customerId, + subscription: subscriptionId, + subscription_details: { + proration_behavior: 'always_invoice', + proration_date: prorationDate, + items: [ + { + id: subscription.items.data[0].id, + price: newPlan.id + } + ] + } + }) + + let prorationAmount = upcomingInvoice.lines.data.reduce((total, item) => total + item.amount, 0) + if (eligibleForFirstMonthFree) { + prorationAmount = 0 + } + + return { + newPlanAmount: newPlanPrice / 100, + prorationAmount: prorationAmount / 100, + creditBalance: creditBalance / 100, + currency: upcomingInvoice.currency.toUpperCase(), + prorationDate, + currentPeriodStart: subscription.current_period_start, + currentPeriodEnd: subscription.current_period_end, + eligibleForFirstMonthFree + } + } catch (error) { + console.error('Error calculating plan proration:', error) + throw error + } + } + + public async updateSubscriptionPlan(subscriptionId: string, newPlanId: string, prorationDate: number) { + if (!this.stripe) { + throw new Error('Stripe is not initialized') + } + + try { + const subscription = await this.stripe.subscriptions.retrieve(subscriptionId) + const customerId = subscription.customer as string + + // Get customer details and metadata + const customer = await this.stripe.customers.retrieve(customerId) + const customerMetadata = (customer as Stripe.Customer).metadata || {} + + // Get the price ID for the new plan + const prices = await this.stripe.prices.list({ + product: newPlanId, + active: true, + limit: 1 + }) + + if (prices.data.length === 0) { + throw new Error('No active price found for the selected plan') + } + + const newPlan = prices.data[0] + let updatedSubscription: Stripe.Response + + // Check if this is an upgrade to 
CLOUD_STARTER_ID and eligible for first month free + const isStarterPlan = newPlanId === process.env.CLOUD_STARTER_ID + const hasUsedFirstMonthFreeCoupon = customerMetadata.has_used_first_month_free === 'true' + + if (isStarterPlan && !hasUsedFirstMonthFreeCoupon) { + // Create the one-time 100% off coupon + const coupon = await this.stripe.coupons.create({ + duration: 'once', + percent_off: 100, + max_redemptions: 1, + metadata: { + type: 'first_month_free', + customer_id: customerId, + plan_id: process.env.CLOUD_STARTER_ID || '' + } + }) + + // Create a promotion code linked to the coupon + const promotionCode = await this.stripe.promotionCodes.create({ + coupon: coupon.id, + max_redemptions: 1 + }) + + // Update the subscription with the new plan and apply the promotion code + updatedSubscription = await this.stripe.subscriptions.update(subscriptionId, { + items: [ + { + id: subscription.items.data[0].id, + price: newPlan.id + } + ], + proration_behavior: 'always_invoice', + proration_date: prorationDate, + promotion_code: promotionCode.id + }) + + // Update customer metadata to mark the coupon as used + await this.stripe.customers.update(customerId, { + metadata: { + ...customerMetadata, + has_used_first_month_free: 'true', + first_month_free_date: new Date().toISOString() + } + }) + } else { + // Regular plan update without coupon + updatedSubscription = await this.stripe.subscriptions.update(subscriptionId, { + items: [ + { + id: subscription.items.data[0].id, + price: newPlan.id + } + ], + proration_behavior: 'always_invoice', + proration_date: prorationDate + }) + } + + // Get and pay the latest invoice + const invoice = await this.stripe.invoices.list({ + subscription: subscriptionId, + limit: 1 + }) + + if (invoice.data.length > 0) { + const latestInvoice = invoice.data[0] + if (latestInvoice.status !== 'paid') { + await this.stripe.invoices.pay(latestInvoice.id) + } + } + + return { + success: true, + subscription: updatedSubscription, + invoice: 
invoice.data[0] + } + } catch (error) { + console.error('Error updating subscription plan:', error) + throw error + } + } +} diff --git a/packages/server/src/UsageCacheManager.ts b/packages/server/src/UsageCacheManager.ts new file mode 100644 index 000000000..583fe5445 --- /dev/null +++ b/packages/server/src/UsageCacheManager.ts @@ -0,0 +1,213 @@ +import { Keyv } from 'keyv' +import KeyvRedis from '@keyv/redis' +import { Cache, createCache } from 'cache-manager' +import { MODE } from './Interface' +import { LICENSE_QUOTAS } from './utils/constants' +import { StripeManager } from './StripeManager' + +const DISABLED_QUOTAS = { + [LICENSE_QUOTAS.PREDICTIONS_LIMIT]: 0, + [LICENSE_QUOTAS.STORAGE_LIMIT]: 0, // in MB + [LICENSE_QUOTAS.FLOWS_LIMIT]: 0, + [LICENSE_QUOTAS.USERS_LIMIT]: 0, + [LICENSE_QUOTAS.ADDITIONAL_SEATS_LIMIT]: 0 +} + +const UNLIMITED_QUOTAS = { + [LICENSE_QUOTAS.PREDICTIONS_LIMIT]: -1, + [LICENSE_QUOTAS.STORAGE_LIMIT]: -1, + [LICENSE_QUOTAS.FLOWS_LIMIT]: -1, + [LICENSE_QUOTAS.USERS_LIMIT]: -1, + [LICENSE_QUOTAS.ADDITIONAL_SEATS_LIMIT]: -1 +} + +export class UsageCacheManager { + private cache: Cache + private static instance: UsageCacheManager + + public static async getInstance(): Promise { + if (!UsageCacheManager.instance) { + UsageCacheManager.instance = new UsageCacheManager() + await UsageCacheManager.instance.initialize() + } + return UsageCacheManager.instance + } + + private async initialize(): Promise { + if (process.env.MODE === MODE.QUEUE) { + let redisConfig: string | Record + if (process.env.REDIS_URL) { + redisConfig = process.env.REDIS_URL + } else { + redisConfig = { + username: process.env.REDIS_USERNAME || undefined, + password: process.env.REDIS_PASSWORD || undefined, + socket: { + host: process.env.REDIS_HOST || 'localhost', + port: parseInt(process.env.REDIS_PORT || '6379'), + tls: process.env.REDIS_TLS === 'true', + cert: process.env.REDIS_CERT ? 
Buffer.from(process.env.REDIS_CERT, 'base64') : undefined, + key: process.env.REDIS_KEY ? Buffer.from(process.env.REDIS_KEY, 'base64') : undefined, + ca: process.env.REDIS_CA ? Buffer.from(process.env.REDIS_CA, 'base64') : undefined + } + } + } + this.cache = createCache({ + stores: [ + new Keyv({ + store: new KeyvRedis(redisConfig) + }) + ] + }) + } else { + this.cache = createCache() + } + } + + public async getSubscriptionDetails(subscriptionId: string, withoutCache: boolean = false): Promise> { + const stripeManager = await StripeManager.getInstance() + if (!stripeManager || !subscriptionId) { + return UNLIMITED_QUOTAS + } + + // Skip cache if withoutCache is true + if (!withoutCache) { + const subscriptionData = await this.getSubscriptionDataFromCache(subscriptionId) + if (subscriptionData?.subsriptionDetails) { + return subscriptionData.subsriptionDetails + } + } + + // If not in cache, retrieve from Stripe + const subscription = await stripeManager.getStripe().subscriptions.retrieve(subscriptionId) + + // Update subscription data cache + await this.updateSubscriptionDataToCache(subscriptionId, { subsriptionDetails: stripeManager.getSubscriptionObject(subscription) }) + + return stripeManager.getSubscriptionObject(subscription) + } + + public async getQuotas(subscriptionId: string, withoutCache: boolean = false): Promise> { + const stripeManager = await StripeManager.getInstance() + if (!stripeManager || !subscriptionId) { + return UNLIMITED_QUOTAS + } + + // Skip cache if withoutCache is true + if (!withoutCache) { + const subscriptionData = await this.getSubscriptionDataFromCache(subscriptionId) + if (subscriptionData?.quotas) { + return subscriptionData.quotas + } + } + + // If not in cache, retrieve from Stripe + const subscription = await stripeManager.getStripe().subscriptions.retrieve(subscriptionId) + const items = subscription.items.data + if (items.length === 0) { + return DISABLED_QUOTAS + } + + const productId = items[0].price.product as string + 
const product = await stripeManager.getStripe().products.retrieve(productId) + const productMetadata = product.metadata + + if (!productMetadata || Object.keys(productMetadata).length === 0) { + return DISABLED_QUOTAS + } + + const quotas: Record = {} + for (const key in productMetadata) { + if (key.startsWith('quota:')) { + quotas[key] = parseInt(productMetadata[key]) + } + } + + const additionalSeatsItem = subscription.items.data.find( + (item) => (item.price.product as string) === process.env.ADDITIONAL_SEAT_ID + ) + quotas[LICENSE_QUOTAS.ADDITIONAL_SEATS_LIMIT] = additionalSeatsItem?.quantity || 0 + + // Update subscription data cache with quotas + await this.updateSubscriptionDataToCache(subscriptionId, { + quotas, + subsriptionDetails: stripeManager.getSubscriptionObject(subscription) + }) + + return quotas + } + + public async getSubscriptionDataFromCache(subscriptionId: string) { + const cacheKey = `subscription:${subscriptionId}` + return await this.get<{ + quotas?: Record + productId?: string + features?: Record + subsriptionDetails?: Record + }>(cacheKey) + } + + public async updateSubscriptionDataToCache( + subscriptionId: string, + data: Partial<{ + quotas: Record + productId: string + features: Record + subsriptionDetails: Record + }> + ) { + const cacheKey = `subscription:${subscriptionId}` + const existingData = (await this.getSubscriptionDataFromCache(subscriptionId)) || {} + const updatedData = { ...existingData, ...data } + this.set(cacheKey, updatedData, 3600000) // Cache for 1 hour + } + + public async get(key: string): Promise { + if (!this.cache) await this.initialize() + const value = await this.cache.get(key) + return value + } + + public async getTTL(key: string): Promise { + if (!this.cache) await this.initialize() + const value = await this.cache.ttl(key) + return value + } + + public async mget(keys: string[]): Promise<(T | null)[]> { + if (this.cache) { + const values = await this.cache.mget(keys) + return values + } else { + return [] 
+ } + } + + public set(key: string, value: T, ttl?: number) { + if (this.cache) { + this.cache.set(key, value, ttl) + } + } + + public mset(keys: [{ key: string; value: T; ttl: number }]) { + if (this.cache) { + this.cache.mset(keys) + } + } + + public async del(key: string): Promise { + await this.cache.del(key) + } + + public async mdel(keys: string[]): Promise { + await this.cache.mdel(keys) + } + + public async clear(): Promise { + await this.cache.clear() + } + + public async wrap(key: string, fn: () => Promise, ttl?: number): Promise { + return this.cache.wrap(key, fn, ttl) + } +} diff --git a/packages/server/src/commands/base.ts b/packages/server/src/commands/base.ts index 9f0cd46f2..4795ec4b3 100644 --- a/packages/server/src/commands/base.ts +++ b/packages/server/src/commands/base.ts @@ -1,6 +1,6 @@ import { Command, Flags } from '@oclif/core' -import path from 'path' import dotenv from 'dotenv' +import path from 'path' import logger from '../utils/logger' dotenv.config({ path: path.join(__dirname, '..', '..', '.env'), override: true }) @@ -12,20 +12,17 @@ enum EXIT_CODE { export abstract class BaseCommand extends Command { static flags = { - FLOWISE_USERNAME: Flags.string(), - FLOWISE_PASSWORD: Flags.string(), FLOWISE_FILE_SIZE_LIMIT: Flags.string(), PORT: Flags.string(), CORS_ORIGINS: Flags.string(), IFRAME_ORIGINS: Flags.string(), DEBUG: Flags.string(), BLOB_STORAGE_PATH: Flags.string(), - APIKEY_STORAGE_TYPE: Flags.string(), - APIKEY_PATH: Flags.string(), LOG_PATH: Flags.string(), LOG_LEVEL: Flags.string(), TOOL_FUNCTION_BUILTIN_DEP: Flags.string(), TOOL_FUNCTION_EXTERNAL_DEP: Flags.string(), + ALLOW_BUILTIN_DEP: Flags.string(), NUMBER_OF_PROXIES: Flags.string(), DATABASE_TYPE: Flags.string(), DATABASE_PATH: Flags.string(), @@ -59,6 +56,7 @@ export abstract class BaseCommand extends Command { SECRETKEY_AWS_ACCESS_KEY: Flags.string(), SECRETKEY_AWS_SECRET_KEY: Flags.string(), SECRETKEY_AWS_REGION: Flags.string(), + SECRETKEY_AWS_NAME: Flags.string(), 
DISABLED_NODES: Flags.string(), MODE: Flags.string(), WORKER_CONCURRENCY: Flags.string(), @@ -76,7 +74,11 @@ export abstract class BaseCommand extends Command { REDIS_KEY: Flags.string(), REDIS_CA: Flags.string(), REDIS_KEEP_ALIVE: Flags.string(), - ENABLE_BULLMQ_DASHBOARD: Flags.string() + ENABLE_BULLMQ_DASHBOARD: Flags.string(), + CUSTOM_MCP_SECURITY_CHECK: Flags.string(), + CUSTOM_MCP_PROTOCOL: Flags.string(), + HTTP_DENY_LIST: Flags.string(), + TRUST_PROXY: Flags.string() } protected async stopProcess() { @@ -123,7 +125,7 @@ export abstract class BaseCommand extends Command { logger.error('unhandledRejection: ', err) }) - const { flags } = await this.parse(BaseCommand) + const { flags } = await this.parse(this.constructor as any) if (flags.PORT) process.env.PORT = flags.PORT if (flags.CORS_ORIGINS) process.env.CORS_ORIGINS = flags.CORS_ORIGINS if (flags.IFRAME_ORIGINS) process.env.IFRAME_ORIGINS = flags.IFRAME_ORIGINS @@ -131,14 +133,6 @@ export abstract class BaseCommand extends Command { if (flags.NUMBER_OF_PROXIES) process.env.NUMBER_OF_PROXIES = flags.NUMBER_OF_PROXIES if (flags.SHOW_COMMUNITY_NODES) process.env.SHOW_COMMUNITY_NODES = flags.SHOW_COMMUNITY_NODES if (flags.DISABLED_NODES) process.env.DISABLED_NODES = flags.DISABLED_NODES - - // Authorization - if (flags.FLOWISE_USERNAME) process.env.FLOWISE_USERNAME = flags.FLOWISE_USERNAME - if (flags.FLOWISE_PASSWORD) process.env.FLOWISE_PASSWORD = flags.FLOWISE_PASSWORD - if (flags.APIKEY_STORAGE_TYPE) process.env.APIKEY_STORAGE_TYPE = flags.APIKEY_STORAGE_TYPE - if (flags.APIKEY_PATH) process.env.APIKEY_PATH = flags.APIKEY_PATH - - // API Configuration if (flags.FLOWISE_FILE_SIZE_LIMIT) process.env.FLOWISE_FILE_SIZE_LIMIT = flags.FLOWISE_FILE_SIZE_LIMIT // Credentials @@ -148,14 +142,16 @@ export abstract class BaseCommand extends Command { if (flags.SECRETKEY_AWS_ACCESS_KEY) process.env.SECRETKEY_AWS_ACCESS_KEY = flags.SECRETKEY_AWS_ACCESS_KEY if (flags.SECRETKEY_AWS_SECRET_KEY) 
process.env.SECRETKEY_AWS_SECRET_KEY = flags.SECRETKEY_AWS_SECRET_KEY if (flags.SECRETKEY_AWS_REGION) process.env.SECRETKEY_AWS_REGION = flags.SECRETKEY_AWS_REGION + if (flags.SECRETKEY_AWS_NAME) process.env.SECRETKEY_AWS_NAME = flags.SECRETKEY_AWS_NAME // Logs if (flags.LOG_PATH) process.env.LOG_PATH = flags.LOG_PATH if (flags.LOG_LEVEL) process.env.LOG_LEVEL = flags.LOG_LEVEL - // Tool functions + // Custom tool/function dependencies if (flags.TOOL_FUNCTION_BUILTIN_DEP) process.env.TOOL_FUNCTION_BUILTIN_DEP = flags.TOOL_FUNCTION_BUILTIN_DEP if (flags.TOOL_FUNCTION_EXTERNAL_DEP) process.env.TOOL_FUNCTION_EXTERNAL_DEP = flags.TOOL_FUNCTION_EXTERNAL_DEP + if (flags.ALLOW_BUILTIN_DEP) process.env.ALLOW_BUILTIN_DEP = flags.ALLOW_BUILTIN_DEP // Database config if (flags.DATABASE_TYPE) process.env.DATABASE_TYPE = flags.DATABASE_TYPE @@ -210,5 +206,11 @@ export abstract class BaseCommand extends Command { if (flags.REMOVE_ON_COUNT) process.env.REMOVE_ON_COUNT = flags.REMOVE_ON_COUNT if (flags.REDIS_KEEP_ALIVE) process.env.REDIS_KEEP_ALIVE = flags.REDIS_KEEP_ALIVE if (flags.ENABLE_BULLMQ_DASHBOARD) process.env.ENABLE_BULLMQ_DASHBOARD = flags.ENABLE_BULLMQ_DASHBOARD + + // Security + if (flags.CUSTOM_MCP_SECURITY_CHECK) process.env.CUSTOM_MCP_SECURITY_CHECK = flags.CUSTOM_MCP_SECURITY_CHECK + if (flags.CUSTOM_MCP_PROTOCOL) process.env.CUSTOM_MCP_PROTOCOL = flags.CUSTOM_MCP_PROTOCOL + if (flags.HTTP_DENY_LIST) process.env.HTTP_DENY_LIST = flags.HTTP_DENY_LIST + if (flags.TRUST_PROXY) process.env.TRUST_PROXY = flags.TRUST_PROXY } } diff --git a/packages/server/src/commands/user.ts b/packages/server/src/commands/user.ts new file mode 100644 index 000000000..1eecaaa2d --- /dev/null +++ b/packages/server/src/commands/user.ts @@ -0,0 +1,80 @@ +import { Args } from '@oclif/core' +import { QueryRunner } from 'typeorm' +import * as DataSource from '../DataSource' +import { User } from '../enterprise/database/entities/user.entity' +import { getHash } from 
'../enterprise/utils/encryption.util' +import { isInvalidPassword } from '../enterprise/utils/validation.util' +import logger from '../utils/logger' +import { BaseCommand } from './base' + +export default class user extends BaseCommand { + static args = { + email: Args.string({ + description: 'Email address to search for in the user database' + }), + password: Args.string({ + description: 'New password for that user' + }) + } + + async run(): Promise { + const { args } = await this.parse(user) + + let queryRunner: QueryRunner | undefined + try { + logger.info('Initializing DataSource') + const dataSource = await DataSource.getDataSource() + await dataSource.initialize() + + queryRunner = dataSource.createQueryRunner() + await queryRunner.connect() + + if (args.email && args.password) { + logger.info('Running resetPassword') + await this.resetPassword(queryRunner, args.email, args.password) + } else { + logger.info('Running listUserEmails') + await this.listUserEmails(queryRunner) + } + } catch (error) { + logger.error(error) + } finally { + if (queryRunner && !queryRunner.isReleased) await queryRunner.release() + await this.gracefullyExit() + } + } + + async listUserEmails(queryRunner: QueryRunner) { + logger.info('Listing all user emails') + const users = await queryRunner.manager.find(User, { + select: ['email'] + }) + + const emails = users.map((user) => user.email) + logger.info(`Email addresses: ${emails.join(', ')}`) + logger.info(`Email count: ${emails.length}`) + logger.info('To reset user password, run the following command: pnpm user --email "myEmail" --password "myPassword"') + } + + async resetPassword(queryRunner: QueryRunner, email: string, password: string) { + logger.info(`Finding user by email: ${email}`) + const user = await queryRunner.manager.findOne(User, { + where: { email } + }) + if (!user) throw new Error(`User not found with email: ${email}`) + + if (isInvalidPassword(password)) { + const errors = [] + if (!/(?=.*[a-z])/.test(password)) 
errors.push('at least one lowercase letter') + if (!/(?=.*[A-Z])/.test(password)) errors.push('at least one uppercase letter') + if (!/(?=.*\d)/.test(password)) errors.push('at least one number') + if (!/(?=.*[^a-zA-Z0-9])/.test(password)) errors.push('at least one special character') + if (password.length < 8) errors.push('minimum length of 8 characters') + throw new Error(`Invalid password: Must contain ${errors.join(', ')}`) + } + + user.credential = getHash(password) + await queryRunner.manager.save(user) + logger.info(`Password reset for user: ${email}`) + } +} diff --git a/packages/server/src/commands/worker.ts b/packages/server/src/commands/worker.ts index ce0a30439..e993c7360 100644 --- a/packages/server/src/commands/worker.ts +++ b/packages/server/src/commands/worker.ts @@ -7,6 +7,7 @@ import { NodesPool } from '../NodesPool' import { CachePool } from '../CachePool' import { QueueEvents, QueueEventsListener } from 'bullmq' import { AbortControllerPool } from '../AbortControllerPool' +import { UsageCacheManager } from '../UsageCacheManager' interface CustomListener extends QueueEventsListener { abort: (args: { id: string }, id: string) => void @@ -19,7 +20,7 @@ export default class Worker extends BaseCommand { async run(): Promise { logger.info('Starting Flowise Worker...') - const { appDataSource, telemetry, componentNodes, cachePool, abortControllerPool } = await this.prepareData() + const { appDataSource, telemetry, componentNodes, cachePool, abortControllerPool, usageCacheManager } = await this.prepareData() const queueManager = QueueManager.getInstance() queueManager.setupAllQueues({ @@ -27,7 +28,8 @@ export default class Worker extends BaseCommand { telemetry, cachePool, appDataSource, - abortControllerPool + abortControllerPool, + usageCacheManager }) /** Prediction */ @@ -72,7 +74,10 @@ export default class Worker extends BaseCommand { // Initialize cache pool const cachePool = new CachePool() - return { appDataSource, telemetry, componentNodes: 
nodesPool.componentNodes, cachePool, abortControllerPool } + // Initialize usage cache manager + const usageCacheManager = await UsageCacheManager.getInstance() + + return { appDataSource, telemetry, componentNodes: nodesPool.componentNodes, cachePool, abortControllerPool, usageCacheManager } } async catch(error: Error) { diff --git a/packages/server/src/controllers/apikey/index.ts b/packages/server/src/controllers/apikey/index.ts index 40452b719..677d68931 100644 --- a/packages/server/src/controllers/apikey/index.ts +++ b/packages/server/src/controllers/apikey/index.ts @@ -2,11 +2,17 @@ import { Request, Response, NextFunction } from 'express' import { StatusCodes } from 'http-status-codes' import { InternalFlowiseError } from '../../errors/internalFlowiseError' import apikeyService from '../../services/apikey' +import { getPageAndLimitParams } from '../../utils/pagination' // Get api keys const getAllApiKeys = async (req: Request, res: Response, next: NextFunction) => { try { - const apiResponse = await apikeyService.getAllApiKeys() + const autoCreateNewKey = true + const { page, limit } = getPageAndLimitParams(req) + if (!req.user?.activeWorkspaceId) { + throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Workspace ID is required`) + } + const apiResponse = await apikeyService.getAllApiKeys(req.user?.activeWorkspaceId, autoCreateNewKey, page, limit) return res.json(apiResponse) } catch (error) { next(error) @@ -18,7 +24,10 @@ const createApiKey = async (req: Request, res: Response, next: NextFunction) => if (typeof req.body === 'undefined' || !req.body.keyName) { throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: apikeyController.createApiKey - keyName not provided!`) } - const apiResponse = await apikeyService.createApiKey(req.body.keyName) + if (!req.user?.activeWorkspaceId) { + throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Workspace ID is required`) + } + const apiResponse = await 
apikeyService.createApiKey(req.body.keyName, req.user?.activeWorkspaceId) return res.json(apiResponse) } catch (error) { next(error) @@ -34,7 +43,10 @@ const updateApiKey = async (req: Request, res: Response, next: NextFunction) => if (typeof req.body === 'undefined' || !req.body.keyName) { throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: apikeyController.updateApiKey - keyName not provided!`) } - const apiResponse = await apikeyService.updateApiKey(req.params.id, req.body.keyName) + if (!req.user?.activeWorkspaceId) { + throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Workspace ID is required`) + } + const apiResponse = await apikeyService.updateApiKey(req.params.id, req.body.keyName, req.user?.activeWorkspaceId) return res.json(apiResponse) } catch (error) { next(error) @@ -47,6 +59,10 @@ const importKeys = async (req: Request, res: Response, next: NextFunction) => { if (typeof req.body === 'undefined' || !req.body.jsonFile) { throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: apikeyController.importKeys - body not provided!`) } + if (!req.user?.activeWorkspaceId) { + throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Workspace ID is required`) + } + req.body.workspaceId = req.user?.activeWorkspaceId const apiResponse = await apikeyService.importKeys(req.body) return res.json(apiResponse) } catch (error) { @@ -60,7 +76,10 @@ const deleteApiKey = async (req: Request, res: Response, next: NextFunction) => if (typeof req.params === 'undefined' || !req.params.id) { throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: apikeyController.deleteApiKey - id not provided!`) } - const apiResponse = await apikeyService.deleteApiKey(req.params.id) + if (!req.user?.activeWorkspaceId) { + throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Workspace ID is required`) + } + const apiResponse = await apikeyService.deleteApiKey(req.params.id, req.user?.activeWorkspaceId) return 
res.json(apiResponse) } catch (error) { next(error) diff --git a/packages/server/src/controllers/assistants/index.ts b/packages/server/src/controllers/assistants/index.ts index 2180a4c7f..e4159bf3f 100644 --- a/packages/server/src/controllers/assistants/index.ts +++ b/packages/server/src/controllers/assistants/index.ts @@ -1,8 +1,10 @@ -import { Request, Response, NextFunction } from 'express' -import assistantsService from '../../services/assistants' -import { InternalFlowiseError } from '../../errors/internalFlowiseError' +import { NextFunction, Request, Response } from 'express' import { StatusCodes } from 'http-status-codes' +import { InternalFlowiseError } from '../../errors/internalFlowiseError' import { AssistantType } from '../../Interface' +import assistantsService from '../../services/assistants' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' +import { checkUsageLimit } from '../../utils/quotaUsage' const createAssistant = async (req: Request, res: Response, next: NextFunction) => { try { @@ -12,7 +14,30 @@ const createAssistant = async (req: Request, res: Response, next: NextFunction) `Error: assistantsController.createAssistant - body not provided!` ) } - const apiResponse = await assistantsService.createAssistant(req.body) + const body = req.body + const orgId = req.user?.activeOrganizationId + if (!orgId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: assistantsController.createAssistant - organization ${orgId} not found!` + ) + } + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: assistantsController.createAssistant - workspace ${workspaceId} not found!` + ) + } + const subscriptionId = req.user?.activeOrganizationSubscriptionId || '' + + const existingAssistantCount = await assistantsService.getAssistantsCountByOrganization(body.type, orgId) + const newAssistantCount = 1 + await checkUsageLimit('flows', 
subscriptionId, getRunningExpressApp().usageCacheManager, existingAssistantCount + newAssistantCount) + + body.workspaceId = workspaceId + const apiResponse = await assistantsService.createAssistant(body, orgId) + return res.json(apiResponse) } catch (error) { next(error) @@ -27,7 +52,14 @@ const deleteAssistant = async (req: Request, res: Response, next: NextFunction) `Error: assistantsController.deleteAssistant - id not provided!` ) } - const apiResponse = await assistantsService.deleteAssistant(req.params.id, req.query.isDeleteBoth) + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: assistantsController.deleteAssistant - workspace ${workspaceId} not found!` + ) + } + const apiResponse = await assistantsService.deleteAssistant(req.params.id, req.query.isDeleteBoth, workspaceId) return res.json(apiResponse) } catch (error) { next(error) @@ -37,7 +69,14 @@ const deleteAssistant = async (req: Request, res: Response, next: NextFunction) const getAllAssistants = async (req: Request, res: Response, next: NextFunction) => { try { const type = req.query.type as AssistantType - const apiResponse = await assistantsService.getAllAssistants(type) + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: assistantsController.getAllAssistants - workspace ${workspaceId} not found!` + ) + } + const apiResponse = await assistantsService.getAllAssistants(workspaceId, type) return res.json(apiResponse) } catch (error) { next(error) @@ -52,7 +91,14 @@ const getAssistantById = async (req: Request, res: Response, next: NextFunction) `Error: assistantsController.getAssistantById - id not provided!` ) } - const apiResponse = await assistantsService.getAssistantById(req.params.id) + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + 
`Error: assistantsController.getAssistantById - workspace ${workspaceId} not found!` + ) + } + const apiResponse = await assistantsService.getAssistantById(req.params.id, workspaceId) return res.json(apiResponse) } catch (error) { next(error) @@ -73,7 +119,14 @@ const updateAssistant = async (req: Request, res: Response, next: NextFunction) `Error: assistantsController.updateAssistant - body not provided!` ) } - const apiResponse = await assistantsService.updateAssistant(req.params.id, req.body) + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: assistantsController.updateAssistant - workspace ${workspaceId} not found!` + ) + } + const apiResponse = await assistantsService.updateAssistant(req.params.id, req.body, workspaceId) return res.json(apiResponse) } catch (error) { next(error) @@ -91,7 +144,14 @@ const getChatModels = async (req: Request, res: Response, next: NextFunction) => const getDocumentStores = async (req: Request, res: Response, next: NextFunction) => { try { - const apiResponse = await assistantsService.getDocumentStores() + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: assistantsController.getDocumentStores - workspace ${workspaceId} not found!` + ) + } + const apiResponse = await assistantsService.getDocumentStores(workspaceId) return res.json(apiResponse) } catch (error) { next(error) diff --git a/packages/server/src/controllers/chat-messages/index.ts b/packages/server/src/controllers/chat-messages/index.ts index d5c339f83..d1d7cccba 100644 --- a/packages/server/src/controllers/chat-messages/index.ts +++ b/packages/server/src/controllers/chat-messages/index.ts @@ -9,6 +9,7 @@ import { ChatMessage } from '../../database/entities/ChatMessage' import { InternalFlowiseError } from '../../errors/internalFlowiseError' import { StatusCodes } from 'http-status-codes' import 
{ utilGetChatMessage } from '../../utils/getChatMessage' +import { getPageAndLimitParams } from '../../utils/pagination' const getFeedbackTypeFilters = (_feedbackTypeFilters: ChatMessageRatingType[]): ChatMessageRatingType[] | undefined => { try { @@ -62,6 +63,7 @@ const getAllChatMessages = async (req: Request, res: Response, next: NextFunctio chatTypes = [_chatTypes as ChatType] } } + const activeWorkspaceId = req.user?.activeWorkspaceId const sortOrder = req.query?.order as string | undefined const chatId = req.query?.chatId as string | undefined const memoryType = req.query?.memoryType as string | undefined @@ -70,6 +72,9 @@ const getAllChatMessages = async (req: Request, res: Response, next: NextFunctio const startDate = req.query?.startDate as string | undefined const endDate = req.query?.endDate as string | undefined const feedback = req.query?.feedback as boolean | undefined + + const { page, limit } = getPageAndLimitParams(req) + let feedbackTypeFilters = req.query?.feedbackType as ChatMessageRatingType[] | undefined if (feedbackTypeFilters) { feedbackTypeFilters = getFeedbackTypeFilters(feedbackTypeFilters) @@ -91,9 +96,11 @@ const getAllChatMessages = async (req: Request, res: Response, next: NextFunctio endDate, messageId, feedback, - feedbackTypeFilters + feedbackTypeFilters, + activeWorkspaceId, + page, + limit ) - return res.json(parseAPIResponse(apiResponse)) } catch (error) { next(error) @@ -102,6 +109,7 @@ const getAllChatMessages = async (req: Request, res: Response, next: NextFunctio const getAllInternalChatMessages = async (req: Request, res: Response, next: NextFunction) => { try { + const activeWorkspaceId = req.user?.activeWorkspaceId const sortOrder = req.query?.order as string | undefined const chatId = req.query?.chatId as string | undefined const memoryType = req.query?.memoryType as string | undefined @@ -125,7 +133,8 @@ const getAllInternalChatMessages = async (req: Request, res: Response, next: Nex endDate, messageId, feedback, - 
feedbackTypeFilters + feedbackTypeFilters, + activeWorkspaceId ) return res.json(parseAPIResponse(apiResponse)) } catch (error) { @@ -142,8 +151,22 @@ const removeAllChatMessages = async (req: Request, res: Response, next: NextFunc 'Error: chatMessagesController.removeAllChatMessages - id not provided!' ) } + const orgId = req.user?.activeOrganizationId + if (!orgId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: chatMessagesController.removeAllChatMessages - organization ${orgId} not found!` + ) + } + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: chatMessagesController.removeAllChatMessages - workspace ${workspaceId} not found!` + ) + } const chatflowid = req.params.id - const chatflow = await chatflowsService.getChatflowById(req.params.id) + const chatflow = await chatflowsService.getChatflowById(req.params.id, workspaceId) if (!chatflow) { return res.status(404).send(`Chatflow ${req.params.id} not found`) } @@ -177,6 +200,7 @@ const removeAllChatMessages = async (req: Request, res: Response, next: NextFunc if (!chatId) { const isFeedback = feedbackTypeFilters?.length ? 
true : false const hardDelete = req.query?.hardDelete as boolean | undefined + const messages = await utilGetChatMessage({ chatflowid, chatTypes, @@ -184,7 +208,8 @@ const removeAllChatMessages = async (req: Request, res: Response, next: NextFunc startDate, endDate, feedback: isFeedback, - feedbackTypes: feedbackTypeFilters + feedbackTypes: feedbackTypeFilters, + activeWorkspaceId: workspaceId }) const messageIds = messages.map((message) => message.id) @@ -216,6 +241,7 @@ const removeAllChatMessages = async (req: Request, res: Response, next: NextFunc appServer.nodesPool.componentNodes, chatId, appServer.AppDataSource, + orgId, sessionId, memoryType, isClearFromViewMessageDialog @@ -226,7 +252,14 @@ const removeAllChatMessages = async (req: Request, res: Response, next: NextFunc } } - const apiResponse = await chatMessagesService.removeChatMessagesByMessageIds(chatflowid, chatIdMap, messageIds) + const apiResponse = await chatMessagesService.removeChatMessagesByMessageIds( + chatflowid, + chatIdMap, + messageIds, + orgId, + workspaceId, + appServer.usageCacheManager + ) return res.json(apiResponse) } else { try { @@ -235,6 +268,7 @@ const removeAllChatMessages = async (req: Request, res: Response, next: NextFunc appServer.nodesPool.componentNodes, chatId, appServer.AppDataSource, + orgId, sessionId, memoryType, isClearFromViewMessageDialog @@ -255,7 +289,14 @@ const removeAllChatMessages = async (req: Request, res: Response, next: NextFunc const toDate = new Date(endDate) deleteOptions.createdDate = Between(fromDate ?? aMonthAgo(), toDate ?? 
new Date()) } - const apiResponse = await chatMessagesService.removeAllChatMessages(chatId, chatflowid, deleteOptions) + const apiResponse = await chatMessagesService.removeAllChatMessages( + chatId, + chatflowid, + deleteOptions, + orgId, + workspaceId, + appServer.usageCacheManager + ) return res.json(apiResponse) } } catch (error) { @@ -282,26 +323,30 @@ const parseAPIResponse = (apiResponse: ChatMessage | ChatMessage[]): ChatMessage const parseResponse = (response: ChatMessage): ChatMessage => { const parsedResponse = { ...response } - if (parsedResponse.sourceDocuments) { - parsedResponse.sourceDocuments = JSON.parse(parsedResponse.sourceDocuments) - } - if (parsedResponse.usedTools) { - parsedResponse.usedTools = JSON.parse(parsedResponse.usedTools) - } - if (parsedResponse.fileAnnotations) { - parsedResponse.fileAnnotations = JSON.parse(parsedResponse.fileAnnotations) - } - if (parsedResponse.agentReasoning) { - parsedResponse.agentReasoning = JSON.parse(parsedResponse.agentReasoning) - } - if (parsedResponse.fileUploads) { - parsedResponse.fileUploads = JSON.parse(parsedResponse.fileUploads) - } - if (parsedResponse.action) { - parsedResponse.action = JSON.parse(parsedResponse.action) - } - if (parsedResponse.artifacts) { - parsedResponse.artifacts = JSON.parse(parsedResponse.artifacts) + try { + if (parsedResponse.sourceDocuments) { + parsedResponse.sourceDocuments = JSON.parse(parsedResponse.sourceDocuments) + } + if (parsedResponse.usedTools) { + parsedResponse.usedTools = JSON.parse(parsedResponse.usedTools) + } + if (parsedResponse.fileAnnotations) { + parsedResponse.fileAnnotations = JSON.parse(parsedResponse.fileAnnotations) + } + if (parsedResponse.agentReasoning) { + parsedResponse.agentReasoning = JSON.parse(parsedResponse.agentReasoning) + } + if (parsedResponse.fileUploads) { + parsedResponse.fileUploads = JSON.parse(parsedResponse.fileUploads) + } + if (parsedResponse.action) { + parsedResponse.action = JSON.parse(parsedResponse.action) + } + 
if (parsedResponse.artifacts) { + parsedResponse.artifacts = JSON.parse(parsedResponse.artifacts) + } + } catch (e) { + console.error('Error parsing chat message response', e) } return parsedResponse diff --git a/packages/server/src/controllers/chatflows/index.ts b/packages/server/src/controllers/chatflows/index.ts index 930272cc5..6b1a554ca 100644 --- a/packages/server/src/controllers/chatflows/index.ts +++ b/packages/server/src/controllers/chatflows/index.ts @@ -1,18 +1,24 @@ import { NextFunction, Request, Response } from 'express' import { StatusCodes } from 'http-status-codes' -import apiKeyService from '../../services/apikey' import { ChatFlow } from '../../database/entities/ChatFlow' -import { RateLimiterManager } from '../../utils/rateLimit' import { InternalFlowiseError } from '../../errors/internalFlowiseError' import { ChatflowType } from '../../Interface' +import apiKeyService from '../../services/apikey' import chatflowsService from '../../services/chatflows' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' +import { checkUsageLimit } from '../../utils/quotaUsage' +import { RateLimiterManager } from '../../utils/rateLimit' +import { getPageAndLimitParams } from '../../utils/pagination' +import { WorkspaceUserErrorMessage, WorkspaceUserService } from '../../enterprise/services/workspace-user.service' +import { QueryRunner } from 'typeorm' +import { GeneralErrorMessage } from '../../utils/constants' const checkIfChatflowIsValidForStreaming = async (req: Request, res: Response, next: NextFunction) => { try { if (typeof req.params === 'undefined' || !req.params.id) { throw new InternalFlowiseError( StatusCodes.PRECONDITION_FAILED, - `Error: chatflowsRouter.checkIfChatflowIsValidForStreaming - id not provided!` + `Error: chatflowsController.checkIfChatflowIsValidForStreaming - id not provided!` ) } const apiResponse = await chatflowsService.checkIfChatflowIsValidForStreaming(req.params.id) @@ -27,7 +33,7 @@ const 
checkIfChatflowIsValidForUploads = async (req: Request, res: Response, nex if (typeof req.params === 'undefined' || !req.params.id) { throw new InternalFlowiseError( StatusCodes.PRECONDITION_FAILED, - `Error: chatflowsRouter.checkIfChatflowIsValidForUploads - id not provided!` + `Error: chatflowsController.checkIfChatflowIsValidForUploads - id not provided!` ) } const apiResponse = await chatflowsService.checkIfChatflowIsValidForUploads(req.params.id) @@ -40,9 +46,23 @@ const checkIfChatflowIsValidForUploads = async (req: Request, res: Response, nex const deleteChatflow = async (req: Request, res: Response, next: NextFunction) => { try { if (typeof req.params === 'undefined' || !req.params.id) { - throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: chatflowsRouter.deleteChatflow - id not provided!`) + throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: chatflowsController.deleteChatflow - id not provided!`) } - const apiResponse = await chatflowsService.deleteChatflow(req.params.id) + const orgId = req.user?.activeOrganizationId + if (!orgId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: chatflowsController.deleteChatflow - organization ${orgId} not found!` + ) + } + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: chatflowsController.deleteChatflow - workspace ${workspaceId} not found!` + ) + } + const apiResponse = await chatflowsService.deleteChatflow(req.params.id, orgId, workspaceId) return res.json(apiResponse) } catch (error) { next(error) @@ -51,7 +71,14 @@ const deleteChatflow = async (req: Request, res: Response, next: NextFunction) = const getAllChatflows = async (req: Request, res: Response, next: NextFunction) => { try { - const apiResponse = await chatflowsService.getAllChatflows(req.query?.type as ChatflowType) + const { page, limit } = getPageAndLimitParams(req) + + const apiResponse = await 
chatflowsService.getAllChatflows( + req.query?.type as ChatflowType, + req.user?.activeWorkspaceId, + page, + limit + ) return res.json(apiResponse) } catch (error) { next(error) @@ -64,7 +91,7 @@ const getChatflowByApiKey = async (req: Request, res: Response, next: NextFuncti if (typeof req.params === 'undefined' || !req.params.apikey) { throw new InternalFlowiseError( StatusCodes.PRECONDITION_FAILED, - `Error: chatflowsRouter.getChatflowByApiKey - apikey not provided!` + `Error: chatflowsController.getChatflowByApiKey - apikey not provided!` ) } const apikey = await apiKeyService.getApiKey(req.params.apikey) @@ -81,9 +108,16 @@ const getChatflowByApiKey = async (req: Request, res: Response, next: NextFuncti const getChatflowById = async (req: Request, res: Response, next: NextFunction) => { try { if (typeof req.params === 'undefined' || !req.params.id) { - throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: chatflowsRouter.getChatflowById - id not provided!`) + throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: chatflowsController.getChatflowById - id not provided!`) } - const apiResponse = await chatflowsService.getChatflowById(req.params.id) + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: chatflowsController.getChatflowById - workspace ${workspaceId} not found!` + ) + } + const apiResponse = await chatflowsService.getChatflowById(req.params.id, workspaceId) return res.json(apiResponse) } catch (error) { next(error) @@ -93,22 +127,40 @@ const getChatflowById = async (req: Request, res: Response, next: NextFunction) const saveChatflow = async (req: Request, res: Response, next: NextFunction) => { try { if (!req.body) { - throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: chatflowsRouter.saveChatflow - body not provided!`) + throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: 
chatflowsController.saveChatflow - body not provided!`) } + const orgId = req.user?.activeOrganizationId + if (!orgId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: chatflowsController.saveChatflow - organization ${orgId} not found!` + ) + } + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: chatflowsController.saveChatflow - workspace ${workspaceId} not found!` + ) + } + const subscriptionId = req.user?.activeOrganizationSubscriptionId || '' const body = req.body + + const existingChatflowCount = await chatflowsService.getAllChatflowsCountByOrganization(body.type, orgId) + const newChatflowCount = 1 + await checkUsageLimit('flows', subscriptionId, getRunningExpressApp().usageCacheManager, existingChatflowCount + newChatflowCount) + const newChatFlow = new ChatFlow() Object.assign(newChatFlow, body) - const apiResponse = await chatflowsService.saveChatflow(newChatFlow) - return res.json(apiResponse) - } catch (error) { - next(error) - } -} + newChatFlow.workspaceId = workspaceId + const apiResponse = await chatflowsService.saveChatflow( + newChatFlow, + orgId, + workspaceId, + subscriptionId, + getRunningExpressApp().usageCacheManager + ) -const importChatflows = async (req: Request, res: Response, next: NextFunction) => { - try { - const chatflows: Partial[] = req.body.Chatflows - const apiResponse = await chatflowsService.importChatflows(chatflows) return res.json(apiResponse) } catch (error) { next(error) @@ -118,13 +170,27 @@ const importChatflows = async (req: Request, res: Response, next: NextFunction) const updateChatflow = async (req: Request, res: Response, next: NextFunction) => { try { if (typeof req.params === 'undefined' || !req.params.id) { - throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: chatflowsRouter.updateChatflow - id not provided!`) + throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: 
chatflowsController.updateChatflow - id not provided!`) } - const chatflow = await chatflowsService.getChatflowById(req.params.id) + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: chatflowsController.saveChatflow - workspace ${workspaceId} not found!` + ) + } + const chatflow = await chatflowsService.getChatflowById(req.params.id, workspaceId) if (!chatflow) { return res.status(404).send(`Chatflow ${req.params.id} not found`) } - + const orgId = req.user?.activeOrganizationId + if (!orgId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: chatflowsController.saveChatflow - organization ${orgId} not found!` + ) + } + const subscriptionId = req.user?.activeOrganizationSubscriptionId || '' const body = req.body const updateChatFlow = new ChatFlow() Object.assign(updateChatFlow, body) @@ -133,7 +199,7 @@ const updateChatflow = async (req: Request, res: Response, next: NextFunction) = const rateLimiterManager = RateLimiterManager.getInstance() await rateLimiterManager.updateRateLimiter(updateChatFlow) - const apiResponse = await chatflowsService.updateChatflow(chatflow, updateChatFlow) + const apiResponse = await chatflowsService.updateChatflow(chatflow, updateChatFlow, orgId, workspaceId, subscriptionId) return res.json(apiResponse) } catch (error) { next(error) @@ -141,17 +207,31 @@ const updateChatflow = async (req: Request, res: Response, next: NextFunction) = } const getSinglePublicChatflow = async (req: Request, res: Response, next: NextFunction) => { + let queryRunner: QueryRunner | undefined try { if (typeof req.params === 'undefined' || !req.params.id) { throw new InternalFlowiseError( StatusCodes.PRECONDITION_FAILED, - `Error: chatflowsRouter.getSinglePublicChatflow - id not provided!` + `Error: chatflowsController.getSinglePublicChatflow - id not provided!` ) } - const apiResponse = await chatflowsService.getSinglePublicChatflow(req.params.id) - 
return res.json(apiResponse) + const chatflow = await chatflowsService.getChatflowById(req.params.id) + if (!chatflow) return res.status(StatusCodes.NOT_FOUND).json({ message: 'Chatflow not found' }) + if (chatflow.isPublic) return res.status(StatusCodes.OK).json(chatflow) + if (!req.user) return res.status(StatusCodes.UNAUTHORIZED).json({ message: GeneralErrorMessage.UNAUTHORIZED }) + queryRunner = getRunningExpressApp().AppDataSource.createQueryRunner() + const workspaceUserService = new WorkspaceUserService() + const workspaceUser = await workspaceUserService.readWorkspaceUserByUserId(req.user.id, queryRunner) + if (workspaceUser.length === 0) + return res.status(StatusCodes.NOT_FOUND).json({ message: WorkspaceUserErrorMessage.WORKSPACE_USER_NOT_FOUND }) + const workspaceIds = workspaceUser.map((user) => user.workspaceId) + if (!workspaceIds.includes(chatflow.workspaceId)) + return res.status(StatusCodes.BAD_REQUEST).json({ message: 'You are not in the workspace that owns this chatflow' }) + return res.status(StatusCodes.OK).json(chatflow) } catch (error) { next(error) + } finally { + if (queryRunner) await queryRunner.release() } } @@ -160,7 +240,7 @@ const getSinglePublicChatbotConfig = async (req: Request, res: Response, next: N if (typeof req.params === 'undefined' || !req.params.id) { throw new InternalFlowiseError( StatusCodes.PRECONDITION_FAILED, - `Error: chatflowsRouter.getSinglePublicChatbotConfig - id not provided!` + `Error: chatflowsController.getSinglePublicChatbotConfig - id not provided!` ) } const apiResponse = await chatflowsService.getSinglePublicChatbotConfig(req.params.id) @@ -170,6 +250,27 @@ const getSinglePublicChatbotConfig = async (req: Request, res: Response, next: N } } +const checkIfChatflowHasChanged = async (req: Request, res: Response, next: NextFunction) => { + try { + if (typeof req.params === 'undefined' || !req.params.id) { + throw new InternalFlowiseError( + StatusCodes.PRECONDITION_FAILED, + `Error: 
chatflowsController.checkIfChatflowHasChanged - id not provided!` + ) + } + if (!req.params.lastUpdatedDateTime) { + throw new InternalFlowiseError( + StatusCodes.PRECONDITION_FAILED, + `Error: chatflowsController.checkIfChatflowHasChanged - lastUpdatedDateTime not provided!` + ) + } + const apiResponse = await chatflowsService.checkIfChatflowHasChanged(req.params.id, req.params.lastUpdatedDateTime) + return res.json(apiResponse) + } catch (error) { + next(error) + } +} + export default { checkIfChatflowIsValidForStreaming, checkIfChatflowIsValidForUploads, @@ -178,8 +279,8 @@ export default { getChatflowByApiKey, getChatflowById, saveChatflow, - importChatflows, updateChatflow, getSinglePublicChatflow, - getSinglePublicChatbotConfig + getSinglePublicChatbotConfig, + checkIfChatflowHasChanged } diff --git a/packages/server/src/controllers/credentials/index.ts b/packages/server/src/controllers/credentials/index.ts index ad9374430..6c6dea22a 100644 --- a/packages/server/src/controllers/credentials/index.ts +++ b/packages/server/src/controllers/credentials/index.ts @@ -11,7 +11,9 @@ const createCredential = async (req: Request, res: Response, next: NextFunction) `Error: credentialsController.createCredential - body not provided!` ) } - const apiResponse = await credentialsService.createCredential(req.body) + const body = req.body + body.workspaceId = req.user?.activeWorkspaceId + const apiResponse = await credentialsService.createCredential(body) return res.json(apiResponse) } catch (error) { next(error) @@ -26,7 +28,14 @@ const deleteCredentials = async (req: Request, res: Response, next: NextFunction `Error: credentialsController.deleteCredentials - id not provided!` ) } - const apiResponse = await credentialsService.deleteCredentials(req.params.id) + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: credentialsController.deleteCredentials - workspace ${workspaceId} not found!` 
+ ) + } + const apiResponse = await credentialsService.deleteCredentials(req.params.id, workspaceId) return res.json(apiResponse) } catch (error) { next(error) @@ -35,7 +44,14 @@ const deleteCredentials = async (req: Request, res: Response, next: NextFunction const getAllCredentials = async (req: Request, res: Response, next: NextFunction) => { try { - const apiResponse = await credentialsService.getAllCredentials(req.query.credentialName) + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: credentialsController.getAllCredentials - workspace ${workspaceId} not found!` + ) + } + const apiResponse = await credentialsService.getAllCredentials(req.query.credentialName, workspaceId) return res.json(apiResponse) } catch (error) { next(error) @@ -50,7 +66,14 @@ const getCredentialById = async (req: Request, res: Response, next: NextFunction `Error: credentialsController.getCredentialById - id not provided!` ) } - const apiResponse = await credentialsService.getCredentialById(req.params.id) + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: credentialsController.getCredentialById - workspace ${workspaceId} not found!` + ) + } + const apiResponse = await credentialsService.getCredentialById(req.params.id, workspaceId) return res.json(apiResponse) } catch (error) { next(error) @@ -71,7 +94,14 @@ const updateCredential = async (req: Request, res: Response, next: NextFunction) `Error: credentialsController.updateCredential - body not provided!` ) } - const apiResponse = await credentialsService.updateCredential(req.params.id, req.body) + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: credentialsController.updateCredential - workspace ${workspaceId} not found!` + ) + } + const apiResponse = await 
credentialsService.updateCredential(req.params.id, req.body, workspaceId) return res.json(apiResponse) } catch (error) { next(error) diff --git a/packages/server/src/controllers/dataset/index.ts b/packages/server/src/controllers/dataset/index.ts new file mode 100644 index 000000000..0479a20de --- /dev/null +++ b/packages/server/src/controllers/dataset/index.ts @@ -0,0 +1,217 @@ +import { Request, Response, NextFunction } from 'express' +import { InternalFlowiseError } from '../../errors/internalFlowiseError' +import datasetService from '../../services/dataset' +import { StatusCodes } from 'http-status-codes' +import { getPageAndLimitParams } from '../../utils/pagination' + +const getAllDatasets = async (req: Request, res: Response, next: NextFunction) => { + try { + const { page, limit } = getPageAndLimitParams(req) + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: datasetController.getAllDatasets - workspace ${workspaceId} not found!` + ) + } + const apiResponse = await datasetService.getAllDatasets(workspaceId, page, limit) + return res.json(apiResponse) + } catch (error) { + next(error) + } +} + +const getDataset = async (req: Request, res: Response, next: NextFunction) => { + try { + if (typeof req.params === 'undefined' || !req.params.id) { + throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: datasetService.getDataset - id not provided!`) + } + const { page, limit } = getPageAndLimitParams(req) + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: datasetController.getDataset - workspace ${workspaceId} not found!` + ) + } + const apiResponse = await datasetService.getDataset(req.params.id, workspaceId, page, limit) + return res.json(apiResponse) + } catch (error) { + next(error) + } +} + +const createDataset = async (req: Request, res: Response, next: NextFunction) 
=> { + try { + if (!req.body) { + throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: datasetService.createDataset - body not provided!`) + } + const body = req.body + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: datasetController.createDataset - workspace ${workspaceId} not found!` + ) + } + body.workspaceId = workspaceId + const apiResponse = await datasetService.createDataset(body) + return res.json(apiResponse) + } catch (error) { + next(error) + } +} + +const updateDataset = async (req: Request, res: Response, next: NextFunction) => { + try { + if (!req.body) { + throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: datasetService.updateDataset - body not provided!`) + } + if (typeof req.params === 'undefined' || !req.params.id) { + throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: datasetService.updateDataset - id not provided!`) + } + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: datasetController.updateDataset - workspace ${workspaceId} not found!` + ) + } + const apiResponse = await datasetService.updateDataset(req.params.id, req.body, workspaceId) + return res.json(apiResponse) + } catch (error) { + next(error) + } +} + +const deleteDataset = async (req: Request, res: Response, next: NextFunction) => { + try { + if (typeof req.params === 'undefined' || !req.params.id) { + throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: datasetService.deleteDataset - id not provided!`) + } + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: datasetController.deleteDataset - workspace ${workspaceId} not found!` + ) + } + const apiResponse = await datasetService.deleteDataset(req.params.id, workspaceId) + return 
res.json(apiResponse) + } catch (error) { + next(error) + } +} + +const addDatasetRow = async (req: Request, res: Response, next: NextFunction) => { + try { + if (!req.body) { + throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: datasetService.addDatasetRow - body not provided!`) + } + if (!req.body.datasetId) { + throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: datasetService.addDatasetRow - datasetId not provided!`) + } + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: datasetController.addDatasetRow - workspace ${workspaceId} not found!` + ) + } + req.body.workspaceId = workspaceId + const apiResponse = await datasetService.addDatasetRow(req.body) + return res.json(apiResponse) + } catch (error) { + next(error) + } +} + +const updateDatasetRow = async (req: Request, res: Response, next: NextFunction) => { + try { + if (!req.body) { + throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: datasetService.updateDatasetRow - body not provided!`) + } + if (typeof req.params === 'undefined' || !req.params.id) { + throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: datasetService.updateDatasetRow - id not provided!`) + } + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: datasetController.updateDatasetRow - workspace ${workspaceId} not found!` + ) + } + req.body.workspaceId = workspaceId + const apiResponse = await datasetService.updateDatasetRow(req.params.id, req.body) + return res.json(apiResponse) + } catch (error) { + next(error) + } +} + +const deleteDatasetRow = async (req: Request, res: Response, next: NextFunction) => { + try { + if (typeof req.params === 'undefined' || !req.params.id) { + throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: datasetService.deleteDatasetRow - id not 
provided!`) + } + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: datasetController.deleteDatasetRow - workspace ${workspaceId} not found!` + ) + } + const apiResponse = await datasetService.deleteDatasetRow(req.params.id, workspaceId) + return res.json(apiResponse) + } catch (error) { + next(error) + } +} + +const patchDeleteRows = async (req: Request, res: Response, next: NextFunction) => { + try { + const ids = req.body.ids ?? [] + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: datasetController.patchDeleteRows - workspace ${workspaceId} not found!` + ) + } + const apiResponse = await datasetService.patchDeleteRows(ids, workspaceId) + return res.json(apiResponse) + } catch (error) { + next(error) + } +} + +const reorderDatasetRow = async (req: Request, res: Response, next: NextFunction) => { + try { + if (!req.body) { + throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: datasetService.reorderDatasetRow - body not provided!`) + } + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: datasetController.reorderDatasetRow - workspace ${workspaceId} not found!` + ) + } + const apiResponse = await datasetService.reorderDatasetRow(req.body.datasetId, req.body.rows, workspaceId) + return res.json(apiResponse) + } catch (error) { + next(error) + } +} +export default { + getAllDatasets, + getDataset, + createDataset, + updateDataset, + deleteDataset, + addDatasetRow, + updateDatasetRow, + deleteDatasetRow, + patchDeleteRows, + reorderDatasetRow +} diff --git a/packages/server/src/controllers/documentstore/index.ts b/packages/server/src/controllers/documentstore/index.ts index 36b1402e1..1ac4f4395 100644 --- a/packages/server/src/controllers/documentstore/index.ts +++ 
b/packages/server/src/controllers/documentstore/index.ts @@ -6,6 +6,7 @@ import { InternalFlowiseError } from '../../errors/internalFlowiseError' import { DocumentStoreDTO } from '../../Interface' import { getRunningExpressApp } from '../../utils/getRunningExpressApp' import { FLOWISE_COUNTER_STATUS, FLOWISE_METRIC_COUNTERS } from '../../Interface.Metrics' +import { getPageAndLimitParams } from '../../utils/pagination' const createDocumentStore = async (req: Request, res: Response, next: NextFunction) => { try { @@ -15,9 +16,25 @@ const createDocumentStore = async (req: Request, res: Response, next: NextFuncti `Error: documentStoreController.createDocumentStore - body not provided!` ) } + + const orgId = req.user?.activeOrganizationId + if (!orgId) { + throw new InternalFlowiseError( + StatusCodes.PRECONDITION_FAILED, + `Error: documentStoreController.createDocumentStore - organizationId not provided!` + ) + } + const body = req.body + body.workspaceId = req.user?.activeWorkspaceId + if (!body.workspaceId) { + throw new InternalFlowiseError( + StatusCodes.PRECONDITION_FAILED, + `Error: documentStoreController.createDocumentStore - workspaceId not provided!` + ) + } const docStore = DocumentStoreDTO.toEntity(body) - const apiResponse = await documentStoreService.createDocumentStore(docStore) + const apiResponse = await documentStoreService.createDocumentStore(docStore, orgId) return res.json(apiResponse) } catch (error) { next(error) @@ -26,8 +43,24 @@ const createDocumentStore = async (req: Request, res: Response, next: NextFuncti const getAllDocumentStores = async (req: Request, res: Response, next: NextFunction) => { try { - const apiResponse = await documentStoreService.getAllDocumentStores() - return res.json(DocumentStoreDTO.fromEntities(apiResponse)) + const { page, limit } = getPageAndLimitParams(req) + + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.PRECONDITION_FAILED, + `Error: 
documentStoreController.getAllDocumentStores - workspaceId not provided!` + ) + } + const apiResponse: any = await documentStoreService.getAllDocumentStores(workspaceId, page, limit) + if (apiResponse?.total >= 0) { + return res.json({ + total: apiResponse.total, + data: DocumentStoreDTO.fromEntities(apiResponse.data) + }) + } else { + return res.json(DocumentStoreDTO.fromEntities(apiResponse)) + } } catch (error) { next(error) } @@ -44,7 +77,29 @@ const deleteLoaderFromDocumentStore = async (req: Request, res: Response, next: `Error: documentStoreController.deleteLoaderFromDocumentStore - missing storeId or loaderId.` ) } - const apiResponse = await documentStoreService.deleteLoaderFromDocumentStore(storeId, loaderId) + + const orgId = req.user?.activeOrganizationId + if (!orgId) { + throw new InternalFlowiseError( + StatusCodes.PRECONDITION_FAILED, + `Error: documentStoreController.createDocumentStore - organizationId not provided!` + ) + } + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.PRECONDITION_FAILED, + `Error: documentStoreController.createDocumentStore - workspaceId not provided!` + ) + } + + const apiResponse = await documentStoreService.deleteLoaderFromDocumentStore( + storeId, + loaderId, + orgId, + workspaceId, + getRunningExpressApp().usageCacheManager + ) return res.json(DocumentStoreDTO.fromEntity(apiResponse)) } catch (error) { next(error) @@ -59,9 +114,16 @@ const getDocumentStoreById = async (req: Request, res: Response, next: NextFunct `Error: documentStoreController.getDocumentStoreById - id not provided!` ) } - const apiResponse = await documentStoreService.getDocumentStoreById(req.params.id) + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.PRECONDITION_FAILED, + `Error: documentStoreController.getDocumentStoreById - workspaceId not provided!` + ) + } + const apiResponse = await 
documentStoreService.getDocumentStoreById(req.params.id, workspaceId) if (apiResponse && apiResponse.whereUsed) { - apiResponse.whereUsed = JSON.stringify(await documentStoreService.getUsedChatflowNames(apiResponse)) + apiResponse.whereUsed = JSON.stringify(await documentStoreService.getUsedChatflowNames(apiResponse, workspaceId)) } return res.json(DocumentStoreDTO.fromEntity(apiResponse)) } catch (error) { @@ -83,12 +145,20 @@ const getDocumentStoreFileChunks = async (req: Request, res: Response, next: Nex `Error: documentStoreController.getDocumentStoreFileChunks - fileId not provided!` ) } + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.PRECONDITION_FAILED, + `Error: documentStoreController.getDocumentStoreFileChunks - workspaceId not provided!` + ) + } const appDataSource = getRunningExpressApp().AppDataSource const page = req.params.pageNo ? parseInt(req.params.pageNo) : 1 const apiResponse = await documentStoreService.getDocumentStoreFileChunks( appDataSource, req.params.storeId, req.params.fileId, + workspaceId, page ) return res.json(apiResponse) @@ -117,10 +187,18 @@ const deleteDocumentStoreFileChunk = async (req: Request, res: Response, next: N `Error: documentStoreController.deleteDocumentStoreFileChunk - chunkId not provided!` ) } + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.PRECONDITION_FAILED, + `Error: documentStoreController.deleteDocumentStoreFileChunk - workspaceId not provided!` + ) + } const apiResponse = await documentStoreService.deleteDocumentStoreFileChunk( req.params.storeId, req.params.loaderId, - req.params.chunkId + req.params.chunkId, + workspaceId ) return res.json(apiResponse) } catch (error) { @@ -155,12 +233,20 @@ const editDocumentStoreFileChunk = async (req: Request, res: Response, next: Nex `Error: documentStoreController.editDocumentStoreFileChunk - body not provided!` ) } + const 
workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.PRECONDITION_FAILED, + `Error: documentStoreController.editDocumentStoreFileChunk - workspaceId not provided!` + ) + } const apiResponse = await documentStoreService.editDocumentStoreFileChunk( req.params.storeId, req.params.loaderId, req.params.chunkId, body.pageContent, - body.metadata + body.metadata, + workspaceId ) return res.json(apiResponse) } catch (error) { @@ -178,7 +264,14 @@ const saveProcessingLoader = async (req: Request, res: Response, next: NextFunct ) } const body = req.body - const apiResponse = await documentStoreService.saveProcessingLoader(appServer.AppDataSource, body) + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.PRECONDITION_FAILED, + `Error: documentStoreController.saveProcessingLoader - workspaceId not provided!` + ) + } + const apiResponse = await documentStoreService.saveProcessingLoader(appServer.AppDataSource, body, workspaceId) return res.json(apiResponse) } catch (error) { next(error) @@ -199,10 +292,33 @@ const processLoader = async (req: Request, res: Response, next: NextFunction) => `Error: documentStoreController.processLoader - body not provided!` ) } + const orgId = req.user?.activeOrganizationId + if (!orgId) { + throw new InternalFlowiseError( + StatusCodes.PRECONDITION_FAILED, + `Error: documentStoreController.createDocumentStore - organizationId not provided!` + ) + } + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.PRECONDITION_FAILED, + `Error: documentStoreController.createDocumentStore - workspaceId not provided!` + ) + } + const subscriptionId = req.user?.activeOrganizationSubscriptionId || '' const docLoaderId = req.params.loaderId const body = req.body const isInternalRequest = req.headers['x-request-from'] === 'internal' - const apiResponse = await 
documentStoreService.processLoaderMiddleware(body, docLoaderId, isInternalRequest) + const apiResponse = await documentStoreService.processLoaderMiddleware( + body, + docLoaderId, + orgId, + workspaceId, + subscriptionId, + getRunningExpressApp().usageCacheManager, + isInternalRequest + ) return res.json(apiResponse) } catch (error) { next(error) @@ -223,7 +339,14 @@ const updateDocumentStore = async (req: Request, res: Response, next: NextFuncti `Error: documentStoreController.updateDocumentStore - body not provided!` ) } - const store = await documentStoreService.getDocumentStoreById(req.params.id) + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.PRECONDITION_FAILED, + `Error: documentStoreController.updateDocumentStore - workspaceId not provided!` + ) + } + const store = await documentStoreService.getDocumentStoreById(req.params.id, workspaceId) if (!store) { throw new InternalFlowiseError( StatusCodes.NOT_FOUND, @@ -248,7 +371,26 @@ const deleteDocumentStore = async (req: Request, res: Response, next: NextFuncti `Error: documentStoreController.deleteDocumentStore - storeId not provided!` ) } - const apiResponse = await documentStoreService.deleteDocumentStore(req.params.id) + const orgId = req.user?.activeOrganizationId + if (!orgId) { + throw new InternalFlowiseError( + StatusCodes.PRECONDITION_FAILED, + `Error: documentStoreController.createDocumentStore - organizationId not provided!` + ) + } + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.PRECONDITION_FAILED, + `Error: documentStoreController.createDocumentStore - workspaceId not provided!` + ) + } + const apiResponse = await documentStoreService.deleteDocumentStore( + req.params.id, + orgId, + workspaceId, + getRunningExpressApp().usageCacheManager + ) return res.json(apiResponse) } catch (error) { next(error) @@ -263,9 +405,30 @@ const previewFileChunks = async 
(req: Request, res: Response, next: NextFunction `Error: documentStoreController.previewFileChunks - body not provided!` ) } + const orgId = req.user?.activeOrganizationId + if (!orgId) { + throw new InternalFlowiseError( + StatusCodes.PRECONDITION_FAILED, + `Error: documentStoreController.createDocumentStore - organizationId not provided!` + ) + } + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.PRECONDITION_FAILED, + `Error: documentStoreController.createDocumentStore - workspaceId not provided!` + ) + } + const subscriptionId = req.user?.activeOrganizationSubscriptionId || '' const body = req.body body.preview = true - const apiResponse = await documentStoreService.previewChunksMiddleware(body) + const apiResponse = await documentStoreService.previewChunksMiddleware( + body, + orgId, + workspaceId, + subscriptionId, + getRunningExpressApp().usageCacheManager + ) return res.json(apiResponse) } catch (error) { next(error) @@ -286,8 +449,30 @@ const insertIntoVectorStore = async (req: Request, res: Response, next: NextFunc if (typeof req.body === 'undefined') { throw new Error('Error: documentStoreController.insertIntoVectorStore - body not provided!') } + const orgId = req.user?.activeOrganizationId + if (!orgId) { + throw new InternalFlowiseError( + StatusCodes.PRECONDITION_FAILED, + `Error: documentStoreController.createDocumentStore - organizationId not provided!` + ) + } + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.PRECONDITION_FAILED, + `Error: documentStoreController.createDocumentStore - workspaceId not provided!` + ) + } + const subscriptionId = req.user?.activeOrganizationSubscriptionId || '' const body = req.body - const apiResponse = await documentStoreService.insertIntoVectorStoreMiddleware(body) + const apiResponse = await documentStoreService.insertIntoVectorStoreMiddleware( + body, + false, + orgId, + 
workspaceId, + subscriptionId, + getRunningExpressApp().usageCacheManager + ) getRunningExpressApp().metricsProvider?.incrementCounter(FLOWISE_METRIC_COUNTERS.VECTORSTORE_UPSERT, { status: FLOWISE_COUNTER_STATUS.SUCCESS }) @@ -321,7 +506,14 @@ const deleteVectorStoreFromStore = async (req: Request, res: Response, next: Nex `Error: documentStoreController.deleteVectorStoreFromStore - storeId not provided!` ) } - const apiResponse = await documentStoreService.deleteVectorStoreFromStore(req.params.storeId) + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.PRECONDITION_FAILED, + `Error: documentStoreController.deleteVectorStoreFromStore - workspaceId not provided!` + ) + } + const apiResponse = await documentStoreService.deleteVectorStoreFromStore(req.params.storeId, workspaceId) return res.json(apiResponse) } catch (error) { next(error) @@ -335,7 +527,14 @@ const saveVectorStoreConfig = async (req: Request, res: Response, next: NextFunc } const body = req.body const appDataSource = getRunningExpressApp().AppDataSource - const apiResponse = await documentStoreService.saveVectorStoreConfig(appDataSource, body) + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.PRECONDITION_FAILED, + `Error: documentStoreController.saveVectorStoreConfig - workspaceId not provided!` + ) + } + const apiResponse = await documentStoreService.saveVectorStoreConfig(appDataSource, body, true, workspaceId) return res.json(apiResponse) } catch (error) { next(error) @@ -348,7 +547,14 @@ const updateVectorStoreConfigOnly = async (req: Request, res: Response, next: Ne throw new Error('Error: documentStoreController.updateVectorStoreConfigOnly - body not provided!') } const body = req.body - const apiResponse = await documentStoreService.updateVectorStoreConfigOnly(body) + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new 
InternalFlowiseError( + StatusCodes.PRECONDITION_FAILED, + `Error: documentStoreController.updateVectorStoreConfigOnly - workspaceId not provided!` + ) + } + const apiResponse = await documentStoreService.updateVectorStoreConfigOnly(body, workspaceId) return res.json(apiResponse) } catch (error) { next(error) @@ -393,9 +599,32 @@ const upsertDocStoreMiddleware = async (req: Request, res: Response, next: NextF if (typeof req.body === 'undefined') { throw new Error('Error: documentStoreController.upsertDocStoreMiddleware - body not provided!') } + const orgId = req.user?.activeOrganizationId + if (!orgId) { + throw new InternalFlowiseError( + StatusCodes.PRECONDITION_FAILED, + `Error: documentStoreController.createDocumentStore - organizationId not provided!` + ) + } + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.PRECONDITION_FAILED, + `Error: documentStoreController.createDocumentStore - workspaceId not provided!` + ) + } + const subscriptionId = req.user?.activeOrganizationSubscriptionId || '' const body = req.body const files = (req.files as Express.Multer.File[]) || [] - const apiResponse = await documentStoreService.upsertDocStoreMiddleware(req.params.id, body, files) + const apiResponse = await documentStoreService.upsertDocStoreMiddleware( + req.params.id, + body, + files, + orgId, + workspaceId, + subscriptionId, + getRunningExpressApp().usageCacheManager + ) getRunningExpressApp().metricsProvider?.incrementCounter(FLOWISE_METRIC_COUNTERS.VECTORSTORE_UPSERT, { status: FLOWISE_COUNTER_STATUS.SUCCESS }) @@ -416,8 +645,30 @@ const refreshDocStoreMiddleware = async (req: Request, res: Response, next: Next `Error: documentStoreController.refreshDocStoreMiddleware - storeId not provided!` ) } + const orgId = req.user?.activeOrganizationId + if (!orgId) { + throw new InternalFlowiseError( + StatusCodes.PRECONDITION_FAILED, + `Error: documentStoreController.createDocumentStore - organizationId 
not provided!` + ) + } + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.PRECONDITION_FAILED, + `Error: documentStoreController.createDocumentStore - workspaceId not provided!` + ) + } + const subscriptionId = req.user?.activeOrganizationSubscriptionId || '' const body = req.body - const apiResponse = await documentStoreService.refreshDocStoreMiddleware(req.params.id, body) + const apiResponse = await documentStoreService.refreshDocStoreMiddleware( + req.params.id, + body, + orgId, + workspaceId, + subscriptionId, + getRunningExpressApp().usageCacheManager + ) getRunningExpressApp().metricsProvider?.incrementCounter(FLOWISE_METRIC_COUNTERS.VECTORSTORE_UPSERT, { status: FLOWISE_COUNTER_STATUS.SUCCESS }) diff --git a/packages/server/src/controllers/evaluations/index.ts b/packages/server/src/controllers/evaluations/index.ts new file mode 100644 index 000000000..b38213aa0 --- /dev/null +++ b/packages/server/src/controllers/evaluations/index.ts @@ -0,0 +1,186 @@ +import { Request, Response, NextFunction } from 'express' +import { InternalFlowiseError } from '../../errors/internalFlowiseError' +import { StatusCodes } from 'http-status-codes' +import evaluationsService from '../../services/evaluations' +import { getPageAndLimitParams } from '../../utils/pagination' + +const createEvaluation = async (req: Request, res: Response, next: NextFunction) => { + try { + if (!req.body) { + throw new InternalFlowiseError( + StatusCodes.PRECONDITION_FAILED, + `Error: evaluationsService.createEvaluation - body not provided!` + ) + } + const orgId = req.user?.activeOrganizationId + if (!orgId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: evaluationsService.createEvaluation - organization ${orgId} not found!` + ) + } + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: evaluationsService.createEvaluation 
- workspace ${workspaceId} not found!` + ) + } + const body = req.body + body.workspaceId = workspaceId + + const httpProtocol = req.get('x-forwarded-proto') || req.get('X-Forwarded-Proto') || req.protocol + const baseURL = `${httpProtocol}://${req.get('host')}` + const apiResponse = await evaluationsService.createEvaluation(body, baseURL, orgId, workspaceId) + return res.json(apiResponse) + } catch (error) { + next(error) + } +} + +const runAgain = async (req: Request, res: Response, next: NextFunction) => { + try { + if (typeof req.params === 'undefined' || !req.params.id) { + throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: evaluationsService.runAgain - id not provided!`) + } + const orgId = req.user?.activeOrganizationId + if (!orgId) { + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Error: evaluationsService.runAgain - organization ${orgId} not found!`) + } + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: evaluationsService.runAgain - workspace ${workspaceId} not found!` + ) + } + const httpProtocol = req.get('x-forwarded-proto') || req.get('X-Forwarded-Proto') || req.protocol + const baseURL = `${httpProtocol}://${req.get('host')}` + const apiResponse = await evaluationsService.runAgain(req.params.id, baseURL, orgId, workspaceId) + return res.json(apiResponse) + } catch (error) { + next(error) + } +} + +const getEvaluation = async (req: Request, res: Response, next: NextFunction) => { + try { + if (typeof req.params === 'undefined' || !req.params.id) { + throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: evaluationsService.getEvaluation - id not provided!`) + } + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: evaluationsService.getEvaluation - workspace ${workspaceId} not found!` + ) + } + const apiResponse = await 
evaluationsService.getEvaluation(req.params.id, workspaceId) + return res.json(apiResponse) + } catch (error) { + next(error) + } +} + +const deleteEvaluation = async (req: Request, res: Response, next: NextFunction) => { + try { + if (typeof req.params === 'undefined' || !req.params.id) { + throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: evaluationsService.deleteEvaluation - id not provided!`) + } + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: evaluationsService.deleteEvaluation - workspace ${workspaceId} not found!` + ) + } + const apiResponse = await evaluationsService.deleteEvaluation(req.params.id, workspaceId) + return res.json(apiResponse) + } catch (error) { + next(error) + } +} + +const getAllEvaluations = async (req: Request, res: Response, next: NextFunction) => { + try { + const { page, limit } = getPageAndLimitParams(req) + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: evaluationsService.getAllEvaluations - workspace ${workspaceId} not found!` + ) + } + const apiResponse = await evaluationsService.getAllEvaluations(workspaceId, page, limit) + return res.json(apiResponse) + } catch (error) { + next(error) + } +} + +const isOutdated = async (req: Request, res: Response, next: NextFunction) => { + try { + if (typeof req.params === 'undefined' || !req.params.id) { + throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: evaluationsService.isOutdated - id not provided!`) + } + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: evaluationsService.isOutdated - workspace ${workspaceId} not found!` + ) + } + const apiResponse = await evaluationsService.isOutdated(req.params.id, workspaceId) + return res.json(apiResponse) + } catch (error) { + 
next(error) + } +} + +const getVersions = async (req: Request, res: Response, next: NextFunction) => { + try { + if (typeof req.params === 'undefined' || !req.params.id) { + throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: evaluationsService.getVersions - id not provided!`) + } + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: evaluationsService.getVersions - workspace ${workspaceId} not found!` + ) + } + const apiResponse = await evaluationsService.getVersions(req.params.id, workspaceId) + return res.json(apiResponse) + } catch (error) { + next(error) + } +} + +const patchDeleteEvaluations = async (req: Request, res: Response, next: NextFunction) => { + try { + const ids = req.body.ids ?? [] + const isDeleteAllVersion = req.body.isDeleteAllVersion ?? false + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: evaluationsService.patchDeleteEvaluations - workspace ${workspaceId} not found!` + ) + } + const apiResponse = await evaluationsService.patchDeleteEvaluations(ids, workspaceId, isDeleteAllVersion) + return res.json(apiResponse) + } catch (error) { + next(error) + } +} + +export default { + createEvaluation, + getEvaluation, + deleteEvaluation, + getAllEvaluations, + isOutdated, + runAgain, + getVersions, + patchDeleteEvaluations +} diff --git a/packages/server/src/controllers/evaluators/index.ts b/packages/server/src/controllers/evaluators/index.ts new file mode 100644 index 000000000..5f864a85c --- /dev/null +++ b/packages/server/src/controllers/evaluators/index.ts @@ -0,0 +1,104 @@ +import { Request, Response, NextFunction } from 'express' +import { InternalFlowiseError } from '../../errors/internalFlowiseError' +import { StatusCodes } from 'http-status-codes' +import evaluatorService from '../../services/evaluator' +import { getPageAndLimitParams } from 
'../../utils/pagination' + +const getAllEvaluators = async (req: Request, res: Response, next: NextFunction) => { + try { + const { page, limit } = getPageAndLimitParams(req) + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: evaluatorService.getAllEvaluators - workspace ${workspaceId} not found!` + ) + } + const apiResponse = await evaluatorService.getAllEvaluators(workspaceId, page, limit) + return res.json(apiResponse) + } catch (error) { + next(error) + } +} + +const getEvaluator = async (req: Request, res: Response, next: NextFunction) => { + try { + if (typeof req.params === 'undefined' || !req.params.id) { + throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: evaluatorService.getEvaluator - id not provided!`) + } + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: evaluatorService.getEvaluator - workspace ${workspaceId} not found!` + ) + } + const apiResponse = await evaluatorService.getEvaluator(req.params.id, workspaceId) + return res.json(apiResponse) + } catch (error) { + next(error) + } +} + +const createEvaluator = async (req: Request, res: Response, next: NextFunction) => { + try { + if (!req.body) { + throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: evaluatorService.createEvaluator - body not provided!`) + } + const body = req.body + body.workspaceId = req.user?.activeWorkspaceId + const apiResponse = await evaluatorService.createEvaluator(body) + return res.json(apiResponse) + } catch (error) { + next(error) + } +} + +const updateEvaluator = async (req: Request, res: Response, next: NextFunction) => { + try { + if (!req.body) { + throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: evaluatorService.updateEvaluator - body not provided!`) + } + if (typeof req.params === 'undefined' || !req.params.id) { + throw new 
InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: evaluatorService.updateEvaluator - id not provided!`) + } + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: evaluatorService.updateEvaluator - workspace ${workspaceId} not found!` + ) + } + const apiResponse = await evaluatorService.updateEvaluator(req.params.id, req.body, workspaceId) + return res.json(apiResponse) + } catch (error) { + next(error) + } +} + +const deleteEvaluator = async (req: Request, res: Response, next: NextFunction) => { + try { + if (typeof req.params === 'undefined' || !req.params.id) { + throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: evaluatorService.deleteEvaluator - id not provided!`) + } + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: evaluatorService.deleteEvaluator - workspace ${workspaceId} not found!` + ) + } + const apiResponse = await evaluatorService.deleteEvaluator(req.params.id, workspaceId) + return res.json(apiResponse) + } catch (error) { + next(error) + } +} + +export default { + getAllEvaluators, + getEvaluator, + createEvaluator, + updateEvaluator, + deleteEvaluator +} diff --git a/packages/server/src/controllers/executions/index.ts b/packages/server/src/controllers/executions/index.ts index 85ba3c729..074b0efa9 100644 --- a/packages/server/src/controllers/executions/index.ts +++ b/packages/server/src/controllers/executions/index.ts @@ -5,7 +5,8 @@ import { ExecutionState } from '../../Interface' const getExecutionById = async (req: Request, res: Response, next: NextFunction) => { try { const executionId = req.params.id - const execution = await executionsService.getExecutionById(executionId) + const workspaceId = req.user?.activeWorkspaceId + const execution = await executionsService.getExecutionById(executionId, workspaceId) return res.json(execution) } 
catch (error) { next(error) @@ -25,7 +26,8 @@ const getPublicExecutionById = async (req: Request, res: Response, next: NextFun const updateExecution = async (req: Request, res: Response, next: NextFunction) => { try { const executionId = req.params.id - const execution = await executionsService.updateExecution(executionId, req.body) + const workspaceId = req.user?.activeWorkspaceId + const execution = await executionsService.updateExecution(executionId, req.body, workspaceId) return res.json(execution) } catch (error) { next(error) @@ -37,11 +39,15 @@ const getAllExecutions = async (req: Request, res: Response, next: NextFunction) // Extract all possible filters from query params const filters: any = {} + // Add workspace ID filter + filters.workspaceId = req.user?.activeWorkspaceId + // ID filter if (req.query.id) filters.id = req.query.id as string // Flow and session filters if (req.query.agentflowId) filters.agentflowId = req.query.agentflowId as string + if (req.query.agentflowName) filters.agentflowName = req.query.agentflowName as string if (req.query.sessionId) filters.sessionId = req.query.sessionId as string // State filter @@ -86,6 +92,7 @@ const getAllExecutions = async (req: Request, res: Response, next: NextFunction) const deleteExecutions = async (req: Request, res: Response, next: NextFunction) => { try { let executionIds: string[] = [] + const workspaceId = req.user?.activeWorkspaceId // Check if we're deleting a single execution from URL param if (req.params.id) { @@ -98,7 +105,7 @@ const deleteExecutions = async (req: Request, res: Response, next: NextFunction) return res.status(400).json({ success: false, message: 'No execution IDs provided' }) } - const result = await executionsService.deleteExecutions(executionIds) + const result = await executionsService.deleteExecutions(executionIds, workspaceId) return res.json(result) } catch (error) { next(error) diff --git a/packages/server/src/controllers/export-import/index.ts 
b/packages/server/src/controllers/export-import/index.ts index ba9d85379..ae2a86928 100644 --- a/packages/server/src/controllers/export-import/index.ts +++ b/packages/server/src/controllers/export-import/index.ts @@ -1,9 +1,18 @@ import { NextFunction, Request, Response } from 'express' +import { StatusCodes } from 'http-status-codes' +import { InternalFlowiseError } from '../../errors/internalFlowiseError' import exportImportService from '../../services/export-import' const exportData = async (req: Request, res: Response, next: NextFunction) => { try { - const apiResponse = await exportImportService.exportData(exportImportService.convertExportInput(req.body)) + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: exportImportController.exportData - workspace ${workspaceId} not found!` + ) + } + const apiResponse = await exportImportService.exportData(exportImportService.convertExportInput(req.body), workspaceId) return res.json(apiResponse) } catch (error) { next(error) @@ -12,9 +21,29 @@ const exportData = async (req: Request, res: Response, next: NextFunction) => { const importData = async (req: Request, res: Response, next: NextFunction) => { try { + const orgId = req.user?.activeOrganizationId + if (!orgId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: exportImportController.importData - organization ${orgId} not found!` + ) + } + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: exportImportController.importData - workspace ${workspaceId} not found!` + ) + } + const subscriptionId = req.user?.activeOrganizationSubscriptionId || '' + const importData = req.body - await exportImportService.importData(importData) - return res.json({ message: 'success' }) + if (!importData) { + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, 'Error: 
exportImportController.importData - importData is required!') + } + + await exportImportService.importData(importData, orgId, workspaceId, subscriptionId) + return res.status(StatusCodes.OK).json({ message: 'success' }) } catch (error) { next(error) } diff --git a/packages/server/src/controllers/files/index.ts b/packages/server/src/controllers/files/index.ts new file mode 100644 index 000000000..37f508a40 --- /dev/null +++ b/packages/server/src/controllers/files/index.ts @@ -0,0 +1,59 @@ +import path from 'path' +import { NextFunction, Request, Response } from 'express' +import { getFilesListFromStorage, getStoragePath, removeSpecificFileFromStorage } from 'flowise-components' +import { updateStorageUsage } from '../../utils/quotaUsage' +import { InternalFlowiseError } from '../../errors/internalFlowiseError' +import { StatusCodes } from 'http-status-codes' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' + +const getAllFiles = async (req: Request, res: Response, next: NextFunction) => { + try { + const activeOrganizationId = req.user?.activeOrganizationId + if (!activeOrganizationId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: filesController.getAllFiles - organization ${activeOrganizationId} not found!` + ) + } + const apiResponse = await getFilesListFromStorage(activeOrganizationId) + const filesList = apiResponse.map((file: any) => ({ + ...file, + // replace org id because we don't want to expose it + path: file.path.replace(getStoragePath(), '').replace(`${path.sep}${activeOrganizationId}${path.sep}`, '') + })) + return res.json(filesList) + } catch (error) { + next(error) + } +} + +const deleteFile = async (req: Request, res: Response, next: NextFunction) => { + try { + const activeOrganizationId = req.user?.activeOrganizationId + if (!activeOrganizationId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: filesController.deleteFile - organization ${activeOrganizationId} not found!` + ) + 
} + const activeWorkspaceId = req.user?.activeWorkspaceId + if (!activeWorkspaceId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: filesController.deleteFile - workspace ${activeWorkspaceId} not found!` + ) + } + const filePath = req.query.path as string + const paths = filePath.split(path.sep).filter((path) => path !== '') + const { totalSize } = await removeSpecificFileFromStorage(activeOrganizationId, ...paths) + await updateStorageUsage(activeOrganizationId, activeWorkspaceId, totalSize, getRunningExpressApp().usageCacheManager) + return res.json({ message: 'file_deleted' }) + } catch (error) { + next(error) + } +} + +export default { + getAllFiles, + deleteFile +} diff --git a/packages/server/src/controllers/flow-configs/index.ts b/packages/server/src/controllers/flow-configs/index.ts index c09262660..ba5fadeee 100644 --- a/packages/server/src/controllers/flow-configs/index.ts +++ b/packages/server/src/controllers/flow-configs/index.ts @@ -11,7 +11,14 @@ const getSingleFlowConfig = async (req: Request, res: Response, next: NextFuncti `Error: flowConfigsController.getSingleFlowConfig - id not provided!` ) } - const apiResponse = await flowConfigsService.getSingleFlowConfig(req.params.id) + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: flowConfigsController.getSingleFlowConfig - workspace ${workspaceId} not found!` + ) + } + const apiResponse = await flowConfigsService.getSingleFlowConfig(req.params.id, workspaceId) return res.json(apiResponse) } catch (error) { next(error) diff --git a/packages/server/src/controllers/get-upload-file/index.ts b/packages/server/src/controllers/get-upload-file/index.ts index a33b73e0a..f43232e4c 100644 --- a/packages/server/src/controllers/get-upload-file/index.ts +++ b/packages/server/src/controllers/get-upload-file/index.ts @@ -4,6 +4,9 @@ import contentDisposition from 'content-disposition' import { 
streamStorageFile } from 'flowise-components' import { StatusCodes } from 'http-status-codes' import { InternalFlowiseError } from '../../errors/internalFlowiseError' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' +import { ChatFlow } from '../../database/entities/ChatFlow' +import { Workspace } from '../../enterprise/database/entities/workspace.entity' const streamUploadedFile = async (req: Request, res: Response, next: NextFunction) => { try { @@ -13,8 +16,33 @@ const streamUploadedFile = async (req: Request, res: Response, next: NextFunctio const chatflowId = req.query.chatflowId as string const chatId = req.query.chatId as string const fileName = req.query.fileName as string - res.setHeader('Content-Disposition', contentDisposition(fileName)) - const fileStream = await streamStorageFile(chatflowId, chatId, fileName) + const download = req.query.download === 'true' // Check if download parameter is set + + const appServer = getRunningExpressApp() + + // This can be public API, so we can only get orgId from the chatflow + const chatflow = await appServer.AppDataSource.getRepository(ChatFlow).findOneBy({ + id: chatflowId + }) + if (!chatflow) { + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Chatflow ${chatflowId} not found`) + } + const chatflowWorkspaceId = chatflow.workspaceId + const workspace = await appServer.AppDataSource.getRepository(Workspace).findOneBy({ + id: chatflowWorkspaceId + }) + if (!workspace) { + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Workspace ${chatflowWorkspaceId} not found`) + } + const orgId = workspace.organizationId as string + + // Set Content-Disposition header - force attachment for download + if (download) { + res.setHeader('Content-Disposition', contentDisposition(fileName, { type: 'attachment' })) + } else { + res.setHeader('Content-Disposition', contentDisposition(fileName)) + } + const fileStream = await streamStorageFile(chatflowId, chatId, fileName, orgId) if (!fileStream) 
throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `Error: streamStorageFile`) diff --git a/packages/server/src/controllers/internal-predictions/index.ts b/packages/server/src/controllers/internal-predictions/index.ts index 5e53a1d65..b5614faf2 100644 --- a/packages/server/src/controllers/internal-predictions/index.ts +++ b/packages/server/src/controllers/internal-predictions/index.ts @@ -1,12 +1,22 @@ -import { Request, Response, NextFunction } from 'express' -import { utilBuildChatflow } from '../../utils/buildChatflow' -import { getRunningExpressApp } from '../../utils/getRunningExpressApp' +import { NextFunction, Request, Response } from 'express' +import { StatusCodes } from 'http-status-codes' +import { InternalFlowiseError } from '../../errors/internalFlowiseError' import { getErrorMessage } from '../../errors/utils' import { MODE } from '../../Interface' +import chatflowService from '../../services/chatflows' +import { utilBuildChatflow } from '../../utils/buildChatflow' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' // Send input message and get prediction result (Internal) const createInternalPrediction = async (req: Request, res: Response, next: NextFunction) => { try { + const workspaceId = req.user?.activeWorkspaceId + + const chatflow = await chatflowService.getChatflowById(req.params.id, workspaceId) + if (!chatflow) { + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Chatflow ${req.params.id} not found`) + } + if (req.body.streaming || req.body.streaming === 'true') { createAndStreamInternalPrediction(req, res, next) return diff --git a/packages/server/src/controllers/log/index.ts b/packages/server/src/controllers/log/index.ts new file mode 100644 index 000000000..9df968e06 --- /dev/null +++ b/packages/server/src/controllers/log/index.ts @@ -0,0 +1,16 @@ +import { Request, Response, NextFunction } from 'express' +import logService from '../../services/log' + +// Get logs +const getLogs = async (req: 
Request, res: Response, next: NextFunction) => { + try { + const apiResponse = await logService.getLogs(req.query?.startDate as string, req.query?.endDate as string) + res.send(apiResponse) + } catch (error) { + next(error) + } +} + +export default { + getLogs +} diff --git a/packages/server/src/controllers/marketplaces/index.ts b/packages/server/src/controllers/marketplaces/index.ts index db947151f..552336855 100644 --- a/packages/server/src/controllers/marketplaces/index.ts +++ b/packages/server/src/controllers/marketplaces/index.ts @@ -1,7 +1,7 @@ -import { Request, Response, NextFunction } from 'express' -import marketplacesService from '../../services/marketplaces' -import { InternalFlowiseError } from '../../errors/internalFlowiseError' +import { NextFunction, Request, Response } from 'express' import { StatusCodes } from 'http-status-codes' +import { InternalFlowiseError } from '../../errors/internalFlowiseError' +import marketplacesService from '../../services/marketplaces' // Get all templates for marketplaces const getAllTemplates = async (req: Request, res: Response, next: NextFunction) => { @@ -21,7 +21,14 @@ const deleteCustomTemplate = async (req: Request, res: Response, next: NextFunct `Error: marketplacesService.deleteCustomTemplate - id not provided!` ) } - const apiResponse = await marketplacesService.deleteCustomTemplate(req.params.id) + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: marketplacesController.deleteCustomTemplate - workspace ${workspaceId} not found!` + ) + } + const apiResponse = await marketplacesService.deleteCustomTemplate(req.params.id, workspaceId) return res.json(apiResponse) } catch (error) { next(error) @@ -30,7 +37,7 @@ const deleteCustomTemplate = async (req: Request, res: Response, next: NextFunct const getAllCustomTemplates = async (req: Request, res: Response, next: NextFunction) => { try { - const apiResponse = await 
marketplacesService.getAllCustomTemplates() + const apiResponse = await marketplacesService.getAllCustomTemplates(req.user?.activeWorkspaceId) return res.json(apiResponse) } catch (error) { next(error) @@ -45,7 +52,15 @@ const saveCustomTemplate = async (req: Request, res: Response, next: NextFunctio `Error: marketplacesService.saveCustomTemplate - body not provided!` ) } - const apiResponse = await marketplacesService.saveCustomTemplate(req.body) + const body = req.body + body.workspaceId = req.user?.activeWorkspaceId + if (!body.workspaceId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: marketplacesController.saveCustomTemplate - workspace ${body.workspaceId} not found!` + ) + } + const apiResponse = await marketplacesService.saveCustomTemplate(body) return res.json(apiResponse) } catch (error) { next(error) diff --git a/packages/server/src/controllers/nodes/index.ts b/packages/server/src/controllers/nodes/index.ts index 05b72c546..b6b96cf49 100644 --- a/packages/server/src/controllers/nodes/index.ts +++ b/packages/server/src/controllers/nodes/index.ts @@ -3,6 +3,7 @@ import _ from 'lodash' import nodesService from '../../services/nodes' import { InternalFlowiseError } from '../../errors/internalFlowiseError' import { StatusCodes } from 'http-status-codes' +import { getWorkspaceSearchOptionsFromReq } from '../../enterprise/utils/ControllerServiceUtils' const getAllNodes = async (req: Request, res: Response, next: NextFunction) => { try { @@ -67,7 +68,9 @@ const getSingleNodeAsyncOptions = async (req: Request, res: Response, next: Next `Error: nodesController.getSingleNodeAsyncOptions - name not provided!` ) } - const apiResponse = await nodesService.getSingleNodeAsyncOptions(req.params.name, req.body) + const body = req.body + body.searchOptions = getWorkspaceSearchOptionsFromReq(req) + const apiResponse = await nodesService.getSingleNodeAsyncOptions(req.params.name, body) return res.json(apiResponse) } catch (error) { next(error) @@ 
-82,7 +85,9 @@ const executeCustomFunction = async (req: Request, res: Response, next: NextFunc `Error: nodesController.executeCustomFunction - body not provided!` ) } - const apiResponse = await nodesService.executeCustomFunction(req.body) + const orgId = req.user?.activeOrganizationId + const workspaceId = req.user?.activeWorkspaceId + const apiResponse = await nodesService.executeCustomFunction(req.body, workspaceId, orgId) return res.json(apiResponse) } catch (error) { next(error) diff --git a/packages/server/src/controllers/openai-assistants/index.ts b/packages/server/src/controllers/openai-assistants/index.ts index 1b516af8c..0e5f91400 100644 --- a/packages/server/src/controllers/openai-assistants/index.ts +++ b/packages/server/src/controllers/openai-assistants/index.ts @@ -5,6 +5,9 @@ import contentDisposition from 'content-disposition' import { InternalFlowiseError } from '../../errors/internalFlowiseError' import { StatusCodes } from 'http-status-codes' import { streamStorageFile } from 'flowise-components' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' +import { ChatFlow } from '../../database/entities/ChatFlow' +import { Workspace } from '../../enterprise/database/entities/workspace.entity' // List available assistants const getAllOpenaiAssistants = async (req: Request, res: Response, next: NextFunction) => { @@ -50,11 +53,29 @@ const getFileFromAssistant = async (req: Request, res: Response, next: NextFunct if (!req.body.chatflowId || !req.body.chatId || !req.body.fileName) { return res.status(500).send(`Invalid file path`) } + const appServer = getRunningExpressApp() const chatflowId = req.body.chatflowId as string const chatId = req.body.chatId as string const fileName = req.body.fileName as string + + // This can be public API, so we can only get orgId from the chatflow + const chatflow = await appServer.AppDataSource.getRepository(ChatFlow).findOneBy({ + id: chatflowId + }) + if (!chatflow) { + throw new 
InternalFlowiseError(StatusCodes.NOT_FOUND, `Chatflow ${chatflowId} not found`) + } + const chatflowWorkspaceId = chatflow.workspaceId + const workspace = await appServer.AppDataSource.getRepository(Workspace).findOneBy({ + id: chatflowWorkspaceId + }) + if (!workspace) { + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Workspace ${chatflowWorkspaceId} not found`) + } + const orgId = workspace.organizationId as string + res.setHeader('Content-Disposition', contentDisposition(fileName)) - const fileStream = await streamStorageFile(chatflowId, chatId, fileName) + const fileStream = await streamStorageFile(chatflowId, chatId, fileName, orgId) if (!fileStream) throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `Error: getFileFromAssistant`) diff --git a/packages/server/src/controllers/predictions/index.ts b/packages/server/src/controllers/predictions/index.ts index b7f79b642..d467f3157 100644 --- a/packages/server/src/controllers/predictions/index.ts +++ b/packages/server/src/controllers/predictions/index.ts @@ -25,7 +25,9 @@ const createPrediction = async (req: Request, res: Response, next: NextFunction) `Error: predictionsController.createPrediction - body not provided!` ) } - const chatflow = await chatflowsService.getChatflowById(req.params.id) + const workspaceId = req.user?.activeWorkspaceId + + const chatflow = await chatflowsService.getChatflowById(req.params.id, workspaceId) if (!chatflow) { throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Chatflow ${req.params.id} not found`) } diff --git a/packages/server/src/controllers/pricing/index.ts b/packages/server/src/controllers/pricing/index.ts new file mode 100644 index 000000000..3e0603472 --- /dev/null +++ b/packages/server/src/controllers/pricing/index.ts @@ -0,0 +1,81 @@ +import { Request, Response, NextFunction } from 'express' + +const getPricing = async (req: Request, res: Response, next: NextFunction) => { + try { + const PRODUCT_IDS = { + FREE: process.env.CLOUD_FREE_ID, + 
STARTER: process.env.CLOUD_STARTER_ID, + PRO: process.env.CLOUD_PRO_ID + } + const pricingPlans = [ + { + prodId: PRODUCT_IDS.FREE, + title: 'Free', + subtitle: 'For trying out the platform', + price: '$0', + period: '/month', + features: [ + { text: '2 Flows & Assistants' }, + { text: '100 Predictions / month' }, + { text: '5MB Storage' }, + { text: 'Evaluations & Metrics' }, + { text: 'Custom Embedded Chatbot Branding' }, + { text: 'Community Support' } + ] + }, + { + prodId: PRODUCT_IDS.STARTER, + title: 'Starter', + subtitle: 'For individuals & small teams', + mostPopular: true, + price: '$35', + period: '/month', + features: [ + { text: 'Everything in Free plan, plus' }, + { text: 'Unlimited Flows & Assistants' }, + { text: '10,000 Predictions / month' }, + { text: '1GB Storage' }, + { text: 'Email Support' } + ] + }, + { + prodId: PRODUCT_IDS.PRO, + title: 'Pro', + subtitle: 'For medium-sized businesses', + price: '$65', + period: '/month', + features: [ + { text: 'Everything in Starter plan, plus' }, + { text: '50,000 Predictions / month' }, + { text: '10GB Storage' }, + { text: 'Unlimited Workspaces' }, + { text: '5 users', subtext: '+ $15/user/month' }, + { text: 'Admin Roles & Permissions' }, + { text: 'Priority Support' } + ] + }, + { + title: 'Enterprise', + subtitle: 'For large organizations', + price: 'Contact Us', + features: [ + { text: 'On-Premise Deployment' }, + { text: 'Air-gapped Environments' }, + { text: 'SSO & SAML' }, + { text: 'LDAP & RBAC' }, + { text: 'Versioning' }, + { text: 'Audit Logs' }, + { text: '99.99% Uptime SLA' }, + { text: 'Personalized Support' } + ] + } + ] + return res.status(200).json(pricingPlans) + } catch (error) { + next(error) + } +} + +export default { + getPricing +} diff --git a/packages/server/src/controllers/settings/index.ts b/packages/server/src/controllers/settings/index.ts new file mode 100644 index 000000000..256ad3013 --- /dev/null +++ b/packages/server/src/controllers/settings/index.ts @@ -0,0 +1,15 @@ 
+import { Request, Response, NextFunction } from 'express' +import settingsService from '../../services/settings' + +const getSettingsList = async (req: Request, res: Response, next: NextFunction) => { + try { + const apiResponse = await settingsService.getSettings() + return res.json(apiResponse) + } catch (error) { + next(error) + } +} + +export default { + getSettingsList +} diff --git a/packages/server/src/controllers/stats/index.ts b/packages/server/src/controllers/stats/index.ts index c86bd544a..e159bf053 100644 --- a/packages/server/src/controllers/stats/index.ts +++ b/packages/server/src/controllers/stats/index.ts @@ -45,7 +45,16 @@ const getChatflowStats = async (req: Request, res: Response, next: NextFunction) return res.status(500).send(e) } } - const apiResponse = await statsService.getChatflowStats(chatflowid, chatTypes, startDate, endDate, '', true, feedbackTypeFilters) + const apiResponse = await statsService.getChatflowStats( + chatflowid, + chatTypes, + startDate, + endDate, + '', + true, + feedbackTypeFilters, + req.user?.activeWorkspaceId + ) return res.json(apiResponse) } catch (error) { next(error) diff --git a/packages/server/src/controllers/text-to-speech/index.ts b/packages/server/src/controllers/text-to-speech/index.ts new file mode 100644 index 000000000..af92713a8 --- /dev/null +++ b/packages/server/src/controllers/text-to-speech/index.ts @@ -0,0 +1,233 @@ +import { NextFunction, Request, Response } from 'express' +import { convertTextToSpeechStream } from 'flowise-components' +import { StatusCodes } from 'http-status-codes' +import { InternalFlowiseError } from '../../errors/internalFlowiseError' +import chatflowsService from '../../services/chatflows' +import textToSpeechService from '../../services/text-to-speech' +import { databaseEntities } from '../../utils' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' + +const generateTextToSpeech = async (req: Request, res: Response) => { + try { + const { + chatId, + 
chatflowId, + chatMessageId, + text, + provider: bodyProvider, + credentialId: bodyCredentialId, + voice: bodyVoice, + model: bodyModel + } = req.body + + if (!text) { + throw new InternalFlowiseError( + StatusCodes.BAD_REQUEST, + `Error: textToSpeechController.generateTextToSpeech - text not provided!` + ) + } + + let provider: string, credentialId: string, voice: string, model: string + + if (chatflowId) { + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: textToSpeechController.generateTextToSpeech - workspace ${workspaceId} not found!` + ) + } + // Get TTS config from chatflow + const chatflow = await chatflowsService.getChatflowById(chatflowId, workspaceId) + const ttsConfig = JSON.parse(chatflow.textToSpeech) + + // Find the provider with status: true + const activeProviderKey = Object.keys(ttsConfig).find((key) => ttsConfig[key].status === true) + if (!activeProviderKey) { + throw new InternalFlowiseError( + StatusCodes.BAD_REQUEST, + `Error: textToSpeechController.generateTextToSpeech - no active TTS provider configured in chatflow!` + ) + } + + const providerConfig = ttsConfig[activeProviderKey] + provider = activeProviderKey + credentialId = providerConfig.credentialId + voice = providerConfig.voice + model = providerConfig.model + } else { + // Use TTS config from request body + provider = bodyProvider + credentialId = bodyCredentialId + voice = bodyVoice + model = bodyModel + } + + if (!provider) { + throw new InternalFlowiseError( + StatusCodes.BAD_REQUEST, + `Error: textToSpeechController.generateTextToSpeech - provider not provided!` + ) + } + + if (!credentialId) { + throw new InternalFlowiseError( + StatusCodes.BAD_REQUEST, + `Error: textToSpeechController.generateTextToSpeech - credentialId not provided!` + ) + } + + res.setHeader('Content-Type', 'text/event-stream') + res.setHeader('Cache-Control', 'no-cache') + res.setHeader('Connection', 'keep-alive') + 
res.setHeader('Access-Control-Allow-Origin', '*') + res.setHeader('Access-Control-Allow-Headers', 'Cache-Control') + + const appServer = getRunningExpressApp() + const options = { + orgId: '', + chatflowid: chatflowId || '', + chatId: chatId || '', + appDataSource: appServer.AppDataSource, + databaseEntities: databaseEntities + } + + const textToSpeechConfig = { + name: provider, + credentialId: credentialId, + voice: voice, + model: model + } + + // Create and store AbortController + const abortController = new AbortController() + const ttsAbortId = `tts_${chatId}_${chatMessageId}` + appServer.abortControllerPool.add(ttsAbortId, abortController) + + try { + await convertTextToSpeechStream( + text, + textToSpeechConfig, + options, + abortController, + (format: string) => { + const startResponse = { + event: 'tts_start', + data: { chatMessageId, format } + } + res.write('event: tts_start\n') + res.write(`data: ${JSON.stringify(startResponse)}\n\n`) + }, + (chunk: Buffer) => { + const audioBase64 = chunk.toString('base64') + const clientResponse = { + event: 'tts_data', + data: { chatMessageId, audioChunk: audioBase64 } + } + res.write('event: tts_data\n') + res.write(`data: ${JSON.stringify(clientResponse)}\n\n`) + }, + async () => { + const endResponse = { + event: 'tts_end', + data: { chatMessageId } + } + res.write('event: tts_end\n') + res.write(`data: ${JSON.stringify(endResponse)}\n\n`) + res.end() + // Clean up from pool on successful completion + appServer.abortControllerPool.remove(ttsAbortId) + } + ) + } catch (error) { + // Clean up from pool on error + appServer.abortControllerPool.remove(ttsAbortId) + throw error + } + } catch (error) { + if (!res.headersSent) { + res.setHeader('Content-Type', 'text/event-stream') + res.setHeader('Cache-Control', 'no-cache') + res.setHeader('Connection', 'keep-alive') + } + + const errorResponse = { + event: 'tts_error', + data: { error: error instanceof Error ? 
error.message : 'TTS generation failed' } + } + res.write('event: tts_error\n') + res.write(`data: ${JSON.stringify(errorResponse)}\n\n`) + res.end() + } +} + +const abortTextToSpeech = async (req: Request, res: Response) => { + try { + const { chatId, chatMessageId, chatflowId } = req.body + + if (!chatId) { + throw new InternalFlowiseError( + StatusCodes.BAD_REQUEST, + `Error: textToSpeechController.abortTextToSpeech - chatId not provided!` + ) + } + + if (!chatMessageId) { + throw new InternalFlowiseError( + StatusCodes.BAD_REQUEST, + `Error: textToSpeechController.abortTextToSpeech - chatMessageId not provided!` + ) + } + + if (!chatflowId) { + throw new InternalFlowiseError( + StatusCodes.BAD_REQUEST, + `Error: textToSpeechController.abortTextToSpeech - chatflowId not provided!` + ) + } + + const appServer = getRunningExpressApp() + + // Abort the TTS generation using existing pool + const ttsAbortId = `tts_${chatId}_${chatMessageId}` + appServer.abortControllerPool.abort(ttsAbortId) + + // Also abort the main chat flow AbortController for auto-TTS + const chatFlowAbortId = `${chatflowId}_${chatId}` + if (appServer.abortControllerPool.get(chatFlowAbortId)) { + appServer.abortControllerPool.abort(chatFlowAbortId) + appServer.sseStreamer.streamMetadataEvent(chatId, { chatId, chatMessageId }) + } + + // Send abort event to client + appServer.sseStreamer.streamTTSAbortEvent(chatId, chatMessageId) + + res.json({ message: 'TTS stream aborted successfully', chatId, chatMessageId }) + } catch (error) { + res.status(500).json({ + error: error instanceof Error ? 
error.message : 'Failed to abort TTS stream' + }) + } +} + +const getVoices = async (req: Request, res: Response, next: NextFunction) => { + try { + const { provider, credentialId } = req.query + + if (!provider) { + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, `Error: textToSpeechController.getVoices - provider not provided!`) + } + + const voices = await textToSpeechService.getVoices(provider as any, credentialId as string) + + return res.json(voices) + } catch (error) { + next(error) + } +} + +export default { + generateTextToSpeech, + abortTextToSpeech, + getVoices +} diff --git a/packages/server/src/controllers/tools/index.ts b/packages/server/src/controllers/tools/index.ts index 35398d13d..301d9420e 100644 --- a/packages/server/src/controllers/tools/index.ts +++ b/packages/server/src/controllers/tools/index.ts @@ -1,14 +1,26 @@ -import { Request, Response, NextFunction } from 'express' -import toolsService from '../../services/tools' -import { InternalFlowiseError } from '../../errors/internalFlowiseError' +import { NextFunction, Request, Response } from 'express' import { StatusCodes } from 'http-status-codes' +import { InternalFlowiseError } from '../../errors/internalFlowiseError' +import toolsService from '../../services/tools' +import { getPageAndLimitParams } from '../../utils/pagination' const createTool = async (req: Request, res: Response, next: NextFunction) => { try { if (!req.body) { throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: toolsController.createTool - body not provided!`) } - const apiResponse = await toolsService.createTool(req.body) + const orgId = req.user?.activeOrganizationId + if (!orgId) { + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Error: toolsController.createTool - organization ${orgId} not found!`) + } + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Error: toolsController.createTool - workspace 
${workspaceId} not found!`) + } + const body = req.body + body.workspaceId = workspaceId + + const apiResponse = await toolsService.createTool(body, orgId) return res.json(apiResponse) } catch (error) { next(error) @@ -20,7 +32,11 @@ const deleteTool = async (req: Request, res: Response, next: NextFunction) => { if (typeof req.params === 'undefined' || !req.params.id) { throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: toolsController.deleteTool - id not provided!`) } - const apiResponse = await toolsService.deleteTool(req.params.id) + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Error: toolsController.deleteTool - workspace ${workspaceId} not found!`) + } + const apiResponse = await toolsService.deleteTool(req.params.id, workspaceId) return res.json(apiResponse) } catch (error) { next(error) @@ -29,7 +45,8 @@ const deleteTool = async (req: Request, res: Response, next: NextFunction) => { const getAllTools = async (req: Request, res: Response, next: NextFunction) => { try { - const apiResponse = await toolsService.getAllTools() + const { page, limit } = getPageAndLimitParams(req) + const apiResponse = await toolsService.getAllTools(req.user?.activeWorkspaceId, page, limit) return res.json(apiResponse) } catch (error) { next(error) @@ -41,7 +58,14 @@ const getToolById = async (req: Request, res: Response, next: NextFunction) => { if (typeof req.params === 'undefined' || !req.params.id) { throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: toolsController.getToolById - id not provided!`) } - const apiResponse = await toolsService.getToolById(req.params.id) + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: toolsController.getToolById - workspace ${workspaceId} not found!` + ) + } + const apiResponse = await toolsService.getToolById(req.params.id, 
workspaceId) return res.json(apiResponse) } catch (error) { next(error) @@ -56,7 +80,11 @@ const updateTool = async (req: Request, res: Response, next: NextFunction) => { if (!req.body) { throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: toolsController.deleteTool - body not provided!`) } - const apiResponse = await toolsService.updateTool(req.params.id, req.body) + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Error: toolsController.updateTool - workspace ${workspaceId} not found!`) + } + const apiResponse = await toolsService.updateTool(req.params.id, req.body, workspaceId) return res.json(apiResponse) } catch (error) { next(error) diff --git a/packages/server/src/controllers/validation/index.ts b/packages/server/src/controllers/validation/index.ts index a73c5c71e..6ce5da426 100644 --- a/packages/server/src/controllers/validation/index.ts +++ b/packages/server/src/controllers/validation/index.ts @@ -12,7 +12,8 @@ const checkFlowValidation = async (req: Request, res: Response, next: NextFuncti `Error: validationController.checkFlowValidation - id not provided!` ) } - const apiResponse = await validationService.checkFlowValidation(flowId) + const workspaceId = req.user?.activeWorkspaceId + const apiResponse = await validationService.checkFlowValidation(flowId, workspaceId) return res.json(apiResponse) } catch (error) { next(error) diff --git a/packages/server/src/controllers/variables/index.ts b/packages/server/src/controllers/variables/index.ts index a124255a1..3f8455410 100644 --- a/packages/server/src/controllers/variables/index.ts +++ b/packages/server/src/controllers/variables/index.ts @@ -3,6 +3,7 @@ import variablesService from '../../services/variables' import { Variable } from '../../database/entities/Variable' import { InternalFlowiseError } from '../../errors/internalFlowiseError' import { StatusCodes } from 'http-status-codes' +import { 
getPageAndLimitParams } from '../../utils/pagination' const createVariable = async (req: Request, res: Response, next: NextFunction) => { try { @@ -12,10 +13,19 @@ const createVariable = async (req: Request, res: Response, next: NextFunction) = `Error: variablesController.createVariable - body not provided!` ) } + const orgId = req.user?.activeOrganizationId + if (!orgId) { + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Error: variablesController.createVariable - organization ${orgId} not found!`) + } + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Error: variablesController.createVariable - workspace ${workspaceId} not found!`) + } const body = req.body + body.workspaceId = workspaceId const newVariable = new Variable() Object.assign(newVariable, body) - const apiResponse = await variablesService.createVariable(newVariable) + const apiResponse = await variablesService.createVariable(newVariable, orgId) return res.json(apiResponse) } catch (error) { next(error) @@ -27,7 +37,14 @@ const deleteVariable = async (req: Request, res: Response, next: NextFunction) = if (typeof req.params === 'undefined' || !req.params.id) { throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, 'Error: variablesController.deleteVariable - id not provided!') } - const apiResponse = await variablesService.deleteVariable(req.params.id) + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: variablesController.deleteVariable - workspace ${workspaceId} not found!` + ) + } + const apiResponse = await variablesService.deleteVariable(req.params.id, workspaceId) return res.json(apiResponse) } catch (error) { next(error) @@ -36,7 +53,15 @@ const deleteVariable = async (req: Request, res: Response, next: NextFunction) = const getAllVariables = async (req: Request, res: Response, next: NextFunction) => { try { - const apiResponse = 
await variablesService.getAllVariables() + const { page, limit } = getPageAndLimitParams(req) + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: variablesController.getAllVariables - workspace ${workspaceId} not found!` + ) + } + const apiResponse = await variablesService.getAllVariables(workspaceId, page, limit) return res.json(apiResponse) } catch (error) { next(error) @@ -54,7 +79,14 @@ const updateVariable = async (req: Request, res: Response, next: NextFunction) = 'Error: variablesController.updateVariable - body not provided!' ) } - const variable = await variablesService.getVariableById(req.params.id) + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: variablesController.updateVariable - workspace ${workspaceId} not found!` + ) + } + const variable = await variablesService.getVariableById(req.params.id, workspaceId) if (!variable) { return res.status(404).send(`Variable ${req.params.id} not found in the database`) } diff --git a/packages/server/src/database/entities/ApiKey.ts b/packages/server/src/database/entities/ApiKey.ts index d96610df2..4778962a1 100644 --- a/packages/server/src/database/entities/ApiKey.ts +++ b/packages/server/src/database/entities/ApiKey.ts @@ -18,4 +18,7 @@ export class ApiKey implements IApiKey { @Column({ type: 'timestamp' }) @UpdateDateColumn() updatedDate: Date + + @Column({ nullable: false, type: 'text' }) + workspaceId: string } diff --git a/packages/server/src/database/entities/Assistant.ts b/packages/server/src/database/entities/Assistant.ts index 2b4a6d3bf..1d9eabbe8 100644 --- a/packages/server/src/database/entities/Assistant.ts +++ b/packages/server/src/database/entities/Assistant.ts @@ -26,4 +26,7 @@ export class Assistant implements IAssistant { @Column({ type: 'timestamp' }) @UpdateDateColumn() updatedDate: Date + + @Column({ nullable: false, 
type: 'text' }) + workspaceId: string } diff --git a/packages/server/src/database/entities/ChatFlow.ts b/packages/server/src/database/entities/ChatFlow.ts index b3a07b82d..d3561aa00 100644 --- a/packages/server/src/database/entities/ChatFlow.ts +++ b/packages/server/src/database/entities/ChatFlow.ts @@ -2,6 +2,13 @@ import { Entity, Column, CreateDateColumn, UpdateDateColumn, PrimaryGeneratedColumn } from 'typeorm' import { ChatflowType, IChatFlow } from '../../Interface' +export enum EnumChatflowType { + CHATFLOW = 'CHATFLOW', + AGENTFLOW = 'AGENTFLOW', + MULTIAGENT = 'MULTIAGENT', + ASSISTANT = 'ASSISTANT' +} + @Entity() export class ChatFlow implements IChatFlow { @PrimaryGeneratedColumn('uuid') @@ -34,13 +41,16 @@ export class ChatFlow implements IChatFlow { @Column({ nullable: true, type: 'text' }) speechToText?: string + @Column({ nullable: true, type: 'text' }) + textToSpeech?: string + @Column({ nullable: true, type: 'text' }) followUpPrompts?: string @Column({ nullable: true, type: 'text' }) category?: string - @Column({ nullable: true, type: 'text' }) + @Column({ type: 'varchar', length: 20, default: EnumChatflowType.CHATFLOW }) type?: ChatflowType @Column({ type: 'timestamp' }) @@ -50,4 +60,7 @@ export class ChatFlow implements IChatFlow { @Column({ type: 'timestamp' }) @UpdateDateColumn() updatedDate: Date + + @Column({ nullable: false, type: 'text' }) + workspaceId: string } diff --git a/packages/server/src/database/entities/Credential.ts b/packages/server/src/database/entities/Credential.ts index daeb0595e..5cff59f49 100644 --- a/packages/server/src/database/entities/Credential.ts +++ b/packages/server/src/database/entities/Credential.ts @@ -23,4 +23,7 @@ export class Credential implements ICredential { @Column({ type: 'timestamp' }) @UpdateDateColumn() updatedDate: Date + + @Column({ nullable: false, type: 'text' }) + workspaceId: string } diff --git a/packages/server/src/database/entities/CustomTemplate.ts 
b/packages/server/src/database/entities/CustomTemplate.ts index 27b88d78e..ed99cebd3 100644 --- a/packages/server/src/database/entities/CustomTemplate.ts +++ b/packages/server/src/database/entities/CustomTemplate.ts @@ -27,6 +27,9 @@ export class CustomTemplate implements ICustomTemplate { @Column({ nullable: true, type: 'text' }) type?: string + @Column({ nullable: false, type: 'text' }) + workspaceId: string + @Column({ type: 'timestamp' }) @CreateDateColumn() createdDate: Date diff --git a/packages/server/src/database/entities/Dataset.ts b/packages/server/src/database/entities/Dataset.ts new file mode 100644 index 000000000..8acd5f2bb --- /dev/null +++ b/packages/server/src/database/entities/Dataset.ts @@ -0,0 +1,24 @@ +/* eslint-disable */ +import { Entity, Column, CreateDateColumn, UpdateDateColumn, PrimaryGeneratedColumn } from 'typeorm' +import { IAssistant, IDataset } from '../../Interface' + +@Entity() +export class Dataset implements IDataset { + @PrimaryGeneratedColumn('uuid') + id: string + + @Column({ type: 'text' }) + name: string + + @Column({ type: 'text' }) + description: string + + @CreateDateColumn() + createdDate: Date + + @UpdateDateColumn() + updatedDate: Date + + @Column({ nullable: false, type: 'text' }) + workspaceId: string +} diff --git a/packages/server/src/database/entities/DatasetRow.ts b/packages/server/src/database/entities/DatasetRow.ts new file mode 100644 index 000000000..a2a3c1fb3 --- /dev/null +++ b/packages/server/src/database/entities/DatasetRow.ts @@ -0,0 +1,25 @@ +/* eslint-disable */ +import { Entity, Column, CreateDateColumn, UpdateDateColumn, PrimaryGeneratedColumn, Index } from 'typeorm' +import { IAssistant, IDataset, IDatasetRow } from '../../Interface' + +@Entity() +export class DatasetRow implements IDatasetRow { + @PrimaryGeneratedColumn('uuid') + id: string + + @Column({ type: 'text' }) + @Index() + datasetId: string + + @Column({ type: 'text' }) + input: string + + @Column({ type: 'text' }) + output: string + + 
@UpdateDateColumn() + updatedDate: Date + + @Column({ name: 'sequence_no' }) + sequenceNo: number +} diff --git a/packages/server/src/database/entities/DocumentStore.ts b/packages/server/src/database/entities/DocumentStore.ts index 694db3e3d..9a94fde52 100644 --- a/packages/server/src/database/entities/DocumentStore.ts +++ b/packages/server/src/database/entities/DocumentStore.ts @@ -37,4 +37,7 @@ export class DocumentStore implements IDocumentStore { @Column({ nullable: true, type: 'text' }) recordManagerConfig: string | null + + @Column({ nullable: false, type: 'text' }) + workspaceId: string } diff --git a/packages/server/src/database/entities/Evaluation.ts b/packages/server/src/database/entities/Evaluation.ts new file mode 100644 index 000000000..4f23a05e3 --- /dev/null +++ b/packages/server/src/database/entities/Evaluation.ts @@ -0,0 +1,41 @@ +import { Column, Entity, PrimaryGeneratedColumn, UpdateDateColumn } from 'typeorm' +import { IEvaluation } from '../../Interface' + +@Entity() +export class Evaluation implements IEvaluation { + @PrimaryGeneratedColumn('uuid') + id: string + + @Column({ type: 'text' }) + average_metrics: string + + @Column({ type: 'text' }) + additionalConfig: string + + @Column() + name: string + + @Column() + evaluationType: string + + @Column() + chatflowId: string + + @Column() + chatflowName: string + + @Column() + datasetId: string + + @Column() + datasetName: string + + @Column() + status: string + + @UpdateDateColumn() + runDate: Date + + @Column({ nullable: false, type: 'text' }) + workspaceId: string +} diff --git a/packages/server/src/database/entities/EvaluationRun.ts b/packages/server/src/database/entities/EvaluationRun.ts new file mode 100644 index 000000000..531ec0095 --- /dev/null +++ b/packages/server/src/database/entities/EvaluationRun.ts @@ -0,0 +1,35 @@ +import { Column, Entity, PrimaryGeneratedColumn, UpdateDateColumn } from 'typeorm' +import { IEvaluationRun } from '../../Interface' + +@Entity() +export class 
EvaluationRun implements IEvaluationRun { + @PrimaryGeneratedColumn('uuid') + id: string + + @Column() + evaluationId: string + + @Column({ type: 'text' }) + input: string + + @Column({ type: 'text' }) + expectedOutput: string + + @UpdateDateColumn() + runDate: Date + + @Column({ type: 'text' }) + actualOutput: string + + @Column({ type: 'text' }) + metrics: string + + @Column({ type: 'text' }) + llmEvaluators: string + + @Column({ type: 'text' }) + evaluators: string + + @Column({ type: 'text' }) + errors: string +} diff --git a/packages/server/src/database/entities/Evaluator.ts b/packages/server/src/database/entities/Evaluator.ts new file mode 100644 index 000000000..a14e0c905 --- /dev/null +++ b/packages/server/src/database/entities/Evaluator.ts @@ -0,0 +1,28 @@ +import { Column, CreateDateColumn, Entity, PrimaryGeneratedColumn, UpdateDateColumn } from 'typeorm' +import { IEvaluator } from '../../Interface' + +//1714808591644 + +@Entity() +export class Evaluator implements IEvaluator { + @PrimaryGeneratedColumn('uuid') + id: string + + @Column() + name: string + + @Column() + type: string + + @Column() + config: string + + @CreateDateColumn() + createdDate: Date + + @UpdateDateColumn() + updatedDate: Date + + @Column({ nullable: false, type: 'text' }) + workspaceId: string +} diff --git a/packages/server/src/database/entities/Execution.ts b/packages/server/src/database/entities/Execution.ts index 483a10ff1..eac53f639 100644 --- a/packages/server/src/database/entities/Execution.ts +++ b/packages/server/src/database/entities/Execution.ts @@ -18,7 +18,7 @@ export class Execution implements IExecution { agentflowId: string @Index() - @Column({ type: 'uuid' }) + @Column({ type: 'varchar' }) sessionId: string @Column({ nullable: true, type: 'text' }) @@ -41,4 +41,7 @@ export class Execution implements IExecution { @ManyToOne(() => ChatFlow) @JoinColumn({ name: 'agentflowId' }) agentflow: ChatFlow + + @Column({ nullable: false, type: 'text' }) + workspaceId: string } 
diff --git a/packages/server/src/database/entities/Tool.ts b/packages/server/src/database/entities/Tool.ts index 49f7335e1..2e35b64a6 100644 --- a/packages/server/src/database/entities/Tool.ts +++ b/packages/server/src/database/entities/Tool.ts @@ -32,4 +32,7 @@ export class Tool implements ITool { @Column({ type: 'timestamp' }) @UpdateDateColumn() updatedDate: Date + + @Column({ nullable: false, type: 'text' }) + workspaceId: string } diff --git a/packages/server/src/database/entities/Variable.ts b/packages/server/src/database/entities/Variable.ts index 2437e8245..33105c0f6 100644 --- a/packages/server/src/database/entities/Variable.ts +++ b/packages/server/src/database/entities/Variable.ts @@ -23,4 +23,7 @@ export class Variable implements IVariable { @Column({ type: 'timestamp' }) @UpdateDateColumn() updatedDate: Date + + @Column({ nullable: false, type: 'text' }) + workspaceId: string } diff --git a/packages/server/src/database/entities/index.ts b/packages/server/src/database/entities/index.ts index c9152a1d7..ad19b4e2e 100644 --- a/packages/server/src/database/entities/index.ts +++ b/packages/server/src/database/entities/index.ts @@ -9,9 +9,23 @@ import { DocumentStore } from './DocumentStore' import { DocumentStoreFileChunk } from './DocumentStoreFileChunk' import { Lead } from './Lead' import { UpsertHistory } from './UpsertHistory' +import { Dataset } from './Dataset' +import { DatasetRow } from './DatasetRow' +import { EvaluationRun } from './EvaluationRun' +import { Evaluation } from './Evaluation' +import { Evaluator } from './Evaluator' import { ApiKey } from './ApiKey' import { CustomTemplate } from './CustomTemplate' import { Execution } from './Execution' +import { LoginActivity, WorkspaceShared, WorkspaceUsers } from '../../enterprise/database/entities/EnterpriseEntities' +import { User } from '../../enterprise/database/entities/user.entity' +import { Organization } from '../../enterprise/database/entities/organization.entity' +import { Role } from 
'../../enterprise/database/entities/role.entity' +import { OrganizationUser } from '../../enterprise/database/entities/organization-user.entity' +import { Workspace } from '../../enterprise/database/entities/workspace.entity' +import { WorkspaceUser } from '../../enterprise/database/entities/workspace-user.entity' +import { LoginMethod } from '../../enterprise/database/entities/login-method.entity' +import { LoginSession } from '../../enterprise/database/entities/login-session.entity' export const entities = { ChatFlow, @@ -21,11 +35,27 @@ export const entities = { Tool, Assistant, Variable, + UpsertHistory, DocumentStore, DocumentStoreFileChunk, Lead, - UpsertHistory, + Dataset, + DatasetRow, + Evaluation, + EvaluationRun, + Evaluator, ApiKey, + User, + WorkspaceUsers, + LoginActivity, + WorkspaceShared, CustomTemplate, - Execution + Execution, + Organization, + Role, + OrganizationUser, + Workspace, + WorkspaceUser, + LoginMethod, + LoginSession } diff --git a/packages/server/src/database/migrations/mariadb/1714548873039-AddEvaluation.ts b/packages/server/src/database/migrations/mariadb/1714548873039-AddEvaluation.ts new file mode 100644 index 000000000..d061d3476 --- /dev/null +++ b/packages/server/src/database/migrations/mariadb/1714548873039-AddEvaluation.ts @@ -0,0 +1,41 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddEvaluation1714548873039 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS \`evaluation\` ( + \`id\` varchar(36) NOT NULL, + \`chatflowId\` LONGTEXT NOT NULL, + \`datasetId\` LONGTEXT NOT NULL, + \`name\` varchar(255) NOT NULL, + \`chatflowName\` varchar(255) NOT NULL, + \`datasetName\` varchar(255) NOT NULL, + \`additionalConfig\` LONGTEXT, + \`average_metrics\` LONGTEXT NOT NULL, + \`status\` varchar(10) NOT NULL, + \`evaluationType\` varchar(20) NOT NULL, + \`runDate\` datetime(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6) 
ON UPDATE CURRENT_TIMESTAMP(6), + PRIMARY KEY (\`id\`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_520_ci;` + ) + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS \`evaluation_run\` ( + \`id\` varchar(36) NOT NULL, + \`evaluationId\` varchar(36) NOT NULL, + \`expectedOutput\` LONGTEXT NOT NULL, + \`actualOutput\` LONGTEXT NOT NULL, + \`evaluators\` LONGTEXT, + \`input\` LONGTEXT DEFAULT NULL, + \`metrics\` TEXT DEFAULT NULL, + \`llmEvaluators\` TEXT DEFAULT NULL, + \`runDate\` datetime(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6) ON UPDATE CURRENT_TIMESTAMP(6), + PRIMARY KEY (\`id\`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_520_ci;` + ) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`DROP TABLE evaluation`) + await queryRunner.query(`DROP TABLE evaluation_run`) + } +} diff --git a/packages/server/src/database/migrations/mariadb/1714548903384-AddDataset.ts b/packages/server/src/database/migrations/mariadb/1714548903384-AddDataset.ts new file mode 100644 index 000000000..641834e10 --- /dev/null +++ b/packages/server/src/database/migrations/mariadb/1714548903384-AddDataset.ts @@ -0,0 +1,31 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddDatasets1714548903384 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS \`dataset\` ( + \`id\` varchar(36) NOT NULL, + \`name\` varchar(255) NOT NULL, + \`description\` varchar(255) DEFAULT NULL, + \`createdDate\` datetime(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6), + \`updatedDate\` datetime(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6) ON UPDATE CURRENT_TIMESTAMP(6), + PRIMARY KEY (\`id\`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_520_ci;` + ) + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS \`dataset_row\` ( + \`id\` varchar(36) NOT NULL, + \`datasetId\` varchar(36) NOT NULL, + \`input\` 
LONGTEXT NOT NULL, + \`output\` LONGTEXT DEFAULT NULL, + \`updatedDate\` datetime(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6) ON UPDATE CURRENT_TIMESTAMP(6), + PRIMARY KEY (\`id\`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_520_ci;` + ) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`DROP TABLE dataset`) + await queryRunner.query(`DROP TABLE dataset_row`) + } +} diff --git a/packages/server/src/database/migrations/mariadb/1714808591644-AddEvaluator.ts b/packages/server/src/database/migrations/mariadb/1714808591644-AddEvaluator.ts new file mode 100644 index 000000000..81d081570 --- /dev/null +++ b/packages/server/src/database/migrations/mariadb/1714808591644-AddEvaluator.ts @@ -0,0 +1,21 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddEvaluator1714808591644 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS \`evaluator\` ( + \`id\` varchar(36) NOT NULL, + \`name\` varchar(255) NOT NULL, + \`type\` varchar(25) DEFAULT NULL, + \`config\` LONGTEXT DEFAULT NULL, + \`createdDate\` datetime(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6), + \`updatedDate\` datetime(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6) ON UPDATE CURRENT_TIMESTAMP(6), + PRIMARY KEY (\`id\`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_520_ci;` + ) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`DROP TABLE evaluator`) + } +} diff --git a/packages/server/src/database/migrations/mariadb/1733752119696-AddSeqNoToDatasetRow.ts b/packages/server/src/database/migrations/mariadb/1733752119696-AddSeqNoToDatasetRow.ts new file mode 100644 index 000000000..3cb553220 --- /dev/null +++ b/packages/server/src/database/migrations/mariadb/1733752119696-AddSeqNoToDatasetRow.ts @@ -0,0 +1,12 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class 
AddSeqNoToDatasetRow1733752119696 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + const columnExists = await queryRunner.hasColumn('dataset_row', 'sequence_no') + if (!columnExists) queryRunner.query(`ALTER TABLE \`dataset_row\` ADD COLUMN \`sequence_no\` INT DEFAULT -1;`) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE \`dataset_row\` DROP COLUMN \`sequence_no\``) + } +} diff --git a/packages/server/src/database/migrations/mariadb/1743758056188-FixOpenSourceAssistantTable.ts b/packages/server/src/database/migrations/mariadb/1743758056188-FixOpenSourceAssistantTable.ts new file mode 100644 index 000000000..54138c390 --- /dev/null +++ b/packages/server/src/database/migrations/mariadb/1743758056188-FixOpenSourceAssistantTable.ts @@ -0,0 +1,24 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' +import { Assistant } from '../../entities/Assistant' + +export class FixOpenSourceAssistantTable1743758056188 implements MigrationInterface { + name = 'FixOpenSourceAssistantTable1743758056188' + + public async up(queryRunner: QueryRunner): Promise { + const columnExists = await queryRunner.hasColumn('assistant', 'type') + if (!columnExists) { + await queryRunner.query(`ALTER TABLE \`assistant\` ADD COLUMN \`type\` TEXT;`) + await queryRunner.query(`UPDATE \`assistant\` SET \`type\` = 'OPENAI';`) + + const assistants: Assistant[] = await queryRunner.query(`SELECT * FROM \`assistant\`;`) + for (let assistant of assistants) { + const details = JSON.parse(assistant.details) + if (!details?.id) await queryRunner.query(`UPDATE \`assistant\` SET \`type\` = 'CUSTOM' WHERE id = '${assistant.id}';`) + } + } + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE \`assistant\` DROP COLUMN \`type\`;`) + } +} diff --git a/packages/server/src/database/migrations/mariadb/1744964560174-AddErrorToEvaluationRun.ts 
b/packages/server/src/database/migrations/mariadb/1744964560174-AddErrorToEvaluationRun.ts new file mode 100644 index 000000000..10fc6f6ff --- /dev/null +++ b/packages/server/src/database/migrations/mariadb/1744964560174-AddErrorToEvaluationRun.ts @@ -0,0 +1,12 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddErrorToEvaluationRun1744964560174 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + const columnExists = await queryRunner.hasColumn('evaluation_run', 'errors') + if (!columnExists) queryRunner.query(`ALTER TABLE \`evaluation_run\` ADD COLUMN \`errors\` LONGTEXT NULL DEFAULT '[]';`) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE \`evaluation_run\` DROP COLUMN \`errors\`;`) + } +} diff --git a/packages/server/src/database/migrations/mariadb/1747902489801-ModifyExecutionDataColumnType.ts b/packages/server/src/database/migrations/mariadb/1747902489801-ModifyExecutionDataColumnType.ts new file mode 100644 index 000000000..e0afad3ed --- /dev/null +++ b/packages/server/src/database/migrations/mariadb/1747902489801-ModifyExecutionDataColumnType.ts @@ -0,0 +1,11 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class ModifyExecutionDataColumnType1747902489801 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + queryRunner.query(`ALTER TABLE \`execution\` MODIFY COLUMN \`executionData\` LONGTEXT NOT NULL;`) + } + + public async down(queryRunner: QueryRunner): Promise { + queryRunner.query(`ALTER TABLE \`execution\` MODIFY COLUMN \`executionData\` TEXT NOT NULL;`) + } +} diff --git a/packages/server/src/database/migrations/mariadb/1754986457485-AddTextToSpeechToChatFlow.ts b/packages/server/src/database/migrations/mariadb/1754986457485-AddTextToSpeechToChatFlow.ts new file mode 100644 index 000000000..385409561 --- /dev/null +++ 
b/packages/server/src/database/migrations/mariadb/1754986457485-AddTextToSpeechToChatFlow.ts @@ -0,0 +1,12 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddTextToSpeechToChatFlow1754986457485 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + const columnExists = await queryRunner.hasColumn('chat_flow', 'textToSpeech') + if (!columnExists) queryRunner.query(`ALTER TABLE \`chat_flow\` ADD COLUMN \`textToSpeech\` TEXT;`) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE \`chat_flow\` DROP COLUMN \`textToSpeech\`;`) + } +} diff --git a/packages/server/src/database/migrations/mariadb/1755066758601-ModifyChatflowType.ts b/packages/server/src/database/migrations/mariadb/1755066758601-ModifyChatflowType.ts new file mode 100644 index 000000000..a1ff962c4 --- /dev/null +++ b/packages/server/src/database/migrations/mariadb/1755066758601-ModifyChatflowType.ts @@ -0,0 +1,15 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' +import { EnumChatflowType } from '../../entities/ChatFlow' + +export class ModifyChatflowType1755066758601 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query(` + UPDATE \`chat_flow\` SET \`type\` = '${EnumChatflowType.CHATFLOW}' WHERE \`type\` IS NULL OR \`type\` = ''; + `) + await queryRunner.query(` + ALTER TABLE \`chat_flow\` MODIFY COLUMN \`type\` VARCHAR(20) NOT NULL DEFAULT '${EnumChatflowType.CHATFLOW}'; + `) + } + + public async down(): Promise {} +} diff --git a/packages/server/src/database/migrations/mariadb/1759419231100-AddTextToSpeechToChatFlow.ts b/packages/server/src/database/migrations/mariadb/1759419231100-AddTextToSpeechToChatFlow.ts new file mode 100644 index 000000000..d527ffb2a --- /dev/null +++ b/packages/server/src/database/migrations/mariadb/1759419231100-AddTextToSpeechToChatFlow.ts @@ -0,0 +1,12 @@ +import { MigrationInterface, QueryRunner 
} from 'typeorm' + +export class AddTextToSpeechToChatFlow1759419231100 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + const columnExists = await queryRunner.hasColumn('chat_flow', 'textToSpeech') + if (!columnExists) await queryRunner.query(`ALTER TABLE \`chat_flow\` ADD COLUMN \`textToSpeech\` TEXT;`) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE \`chat_flow\` DROP COLUMN \`textToSpeech\`;`) + } +} diff --git a/packages/server/src/database/migrations/mariadb/1759424809984-AddChatFlowNameIndex.ts b/packages/server/src/database/migrations/mariadb/1759424809984-AddChatFlowNameIndex.ts new file mode 100644 index 000000000..89e440e8f --- /dev/null +++ b/packages/server/src/database/migrations/mariadb/1759424809984-AddChatFlowNameIndex.ts @@ -0,0 +1,13 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddChatFlowNameIndex1759424809984 implements MigrationInterface { + name = 'AddChatFlowNameIndex1759424809984' + + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query(`CREATE INDEX \`IDX_chatflow_name\` ON \`chat_flow\` (\`name\`(191))`) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`DROP INDEX \`IDX_chatflow_name\` ON \`chat_flow\``) + } +} diff --git a/packages/server/src/database/migrations/mariadb/1765000000000-FixDocumentStoreFileChunkLongText.ts b/packages/server/src/database/migrations/mariadb/1765000000000-FixDocumentStoreFileChunkLongText.ts new file mode 100644 index 000000000..130f59745 --- /dev/null +++ b/packages/server/src/database/migrations/mariadb/1765000000000-FixDocumentStoreFileChunkLongText.ts @@ -0,0 +1,14 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class FixDocumentStoreFileChunkLongText1765000000000 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE 
\`document_store_file_chunk\` MODIFY \`pageContent\` LONGTEXT NOT NULL;`) + await queryRunner.query(`ALTER TABLE \`document_store_file_chunk\` MODIFY \`metadata\` LONGTEXT NULL;`) + } + + public async down(queryRunner: QueryRunner): Promise { + // WARNING: Reverting to TEXT may cause data loss if content exceeds the 64KB limit. + await queryRunner.query(`ALTER TABLE \`document_store_file_chunk\` MODIFY \`pageContent\` TEXT NOT NULL;`) + await queryRunner.query(`ALTER TABLE \`document_store_file_chunk\` MODIFY \`metadata\` TEXT NULL;`) + } +} diff --git a/packages/server/src/database/migrations/mariadb/index.ts b/packages/server/src/database/migrations/mariadb/index.ts index 11d774178..07ddb6ed0 100644 --- a/packages/server/src/database/migrations/mariadb/index.ts +++ b/packages/server/src/database/migrations/mariadb/index.ts @@ -17,9 +17,12 @@ import { AddFeedback1707213626553 } from './1707213626553-AddFeedback' import { AddUpsertHistoryEntity1709814301358 } from './1709814301358-AddUpsertHistoryEntity' import { AddLead1710832127079 } from './1710832127079-AddLead' import { AddLeadToChatMessage1711538023578 } from './1711538023578-AddLeadToChatMessage' -import { AddVectorStoreConfigToDocStore1715861032479 } from './1715861032479-AddVectorStoreConfigToDocStore' import { AddDocumentStore1711637331047 } from './1711637331047-AddDocumentStore' +import { AddEvaluation1714548873039 } from './1714548873039-AddEvaluation' +import { AddDatasets1714548903384 } from './1714548903384-AddDataset' import { AddAgentReasoningToChatMessage1714679514451 } from './1714679514451-AddAgentReasoningToChatMessage' +import { AddEvaluator1714808591644 } from './1714808591644-AddEvaluator' +import { AddVectorStoreConfigToDocStore1715861032479 } from './1715861032479-AddVectorStoreConfigToDocStore' import { AddTypeToChatFlow1716300000000 } from './1716300000000-AddTypeToChatFlow' import { AddApiKey1720230151480 } from './1720230151480-AddApiKey' import { AddActionToChatMessage1721078251523 } 
from './1721078251523-AddActionToChatMessage' @@ -28,7 +31,28 @@ import { AddCustomTemplate1725629836652 } from './1725629836652-AddCustomTemplat import { AddArtifactsToChatMessage1726156258465 } from './1726156258465-AddArtifactsToChatMessage' import { AddFollowUpPrompts1726666318346 } from './1726666318346-AddFollowUpPrompts' import { AddTypeToAssistant1733011290987 } from './1733011290987-AddTypeToAssistant' +import { AddSeqNoToDatasetRow1733752119696 } from './1733752119696-AddSeqNoToDatasetRow' import { AddExecutionEntity1738090872625 } from './1738090872625-AddExecutionEntity' +import { FixOpenSourceAssistantTable1743758056188 } from './1743758056188-FixOpenSourceAssistantTable' +import { AddErrorToEvaluationRun1744964560174 } from './1744964560174-AddErrorToEvaluationRun' +import { ModifyExecutionDataColumnType1747902489801 } from './1747902489801-ModifyExecutionDataColumnType' +import { AddTextToSpeechToChatFlow1754986457485 } from './1754986457485-AddTextToSpeechToChatFlow' +import { ModifyChatflowType1755066758601 } from './1755066758601-ModifyChatflowType' +import { AddTextToSpeechToChatFlow1759419231100 } from './1759419231100-AddTextToSpeechToChatFlow' +import { AddChatFlowNameIndex1759424809984 } from './1759424809984-AddChatFlowNameIndex' +import { FixDocumentStoreFileChunkLongText1765000000000 } from './1765000000000-FixDocumentStoreFileChunkLongText' + +import { AddAuthTables1720230151482 } from '../../../enterprise/database/migrations/mariadb/1720230151482-AddAuthTables' +import { AddWorkspace1725437498242 } from '../../../enterprise/database/migrations/mariadb/1725437498242-AddWorkspace' +import { AddWorkspaceShared1726654922034 } from '../../../enterprise/database/migrations/mariadb/1726654922034-AddWorkspaceShared' +import { AddWorkspaceIdToCustomTemplate1726655750383 } from '../../../enterprise/database/migrations/mariadb/1726655750383-AddWorkspaceIdToCustomTemplate' +import { AddOrganization1727798417345 } from 
'../../../enterprise/database/migrations/mariadb/1727798417345-AddOrganization' +import { LinkWorkspaceId1729130948686 } from '../../../enterprise/database/migrations/mariadb/1729130948686-LinkWorkspaceId' +import { LinkOrganizationId1729133111652 } from '../../../enterprise/database/migrations/mariadb/1729133111652-LinkOrganizationId' +import { AddSSOColumns1730519457880 } from '../../../enterprise/database/migrations/mariadb/1730519457880-AddSSOColumns' +import { AddPersonalWorkspace1734074497540 } from '../../../enterprise/database/migrations/mariadb/1734074497540-AddPersonalWorkspace' +import { RefactorEnterpriseDatabase1737076223692 } from '../../../enterprise/database/migrations/mariadb/1737076223692-RefactorEnterpriseDatabase' +import { ExecutionLinkWorkspaceId1746862866554 } from '../../../enterprise/database/migrations/mariadb/1746862866554-ExecutionLinkWorkspaceId' export const mariadbMigrations = [ Init1693840429259, @@ -51,15 +75,38 @@ export const mariadbMigrations = [ AddDocumentStore1711637331047, AddLead1710832127079, AddLeadToChatMessage1711538023578, + AddEvaluation1714548873039, + AddDatasets1714548903384, AddAgentReasoningToChatMessage1714679514451, - AddTypeToChatFlow1716300000000, + AddEvaluator1714808591644, AddVectorStoreConfigToDocStore1715861032479, + AddTypeToChatFlow1716300000000, AddApiKey1720230151480, AddActionToChatMessage1721078251523, LongTextColumn1722301395521, AddCustomTemplate1725629836652, - AddArtifactsToChatMessage1726156258465, AddFollowUpPrompts1726666318346, AddTypeToAssistant1733011290987, - AddExecutionEntity1738090872625 + AddArtifactsToChatMessage1726156258465, + AddAuthTables1720230151482, + AddWorkspace1725437498242, + AddWorkspaceShared1726654922034, + AddWorkspaceIdToCustomTemplate1726655750383, + AddOrganization1727798417345, + LinkWorkspaceId1729130948686, + LinkOrganizationId1729133111652, + AddSSOColumns1730519457880, + AddSeqNoToDatasetRow1733752119696, + AddPersonalWorkspace1734074497540, + 
RefactorEnterpriseDatabase1737076223692, + AddExecutionEntity1738090872625, + FixOpenSourceAssistantTable1743758056188, + AddErrorToEvaluationRun1744964560174, + ExecutionLinkWorkspaceId1746862866554, + ModifyExecutionDataColumnType1747902489801, + AddTextToSpeechToChatFlow1754986457485, + ModifyChatflowType1755066758601, + AddTextToSpeechToChatFlow1759419231100, + AddChatFlowNameIndex1759424809984, + FixDocumentStoreFileChunkLongText1765000000000 ] diff --git a/packages/server/src/database/migrations/mysql/1714548873039-AddEvaluation.ts b/packages/server/src/database/migrations/mysql/1714548873039-AddEvaluation.ts new file mode 100644 index 000000000..0a41682f6 --- /dev/null +++ b/packages/server/src/database/migrations/mysql/1714548873039-AddEvaluation.ts @@ -0,0 +1,41 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddEvaluation1714548873039 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS \`evaluation\` ( + \`id\` varchar(36) NOT NULL, + \`chatflowId\` LONGTEXT NOT NULL, + \`datasetId\` LONGTEXT NOT NULL, + \`name\` varchar(255) NOT NULL, + \`chatflowName\` varchar(255) NOT NULL, + \`datasetName\` varchar(255) NOT NULL, + \`additionalConfig\` LONGTEXT, + \`average_metrics\` LONGTEXT NOT NULL, + \`status\` varchar(10) NOT NULL, + \`evaluationType\` varchar(20) NOT NULL, + \`runDate\` datetime(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6) ON UPDATE CURRENT_TIMESTAMP(6), + PRIMARY KEY (\`id\`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;` + ) + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS \`evaluation_run\` ( + \`id\` varchar(36) NOT NULL, + \`evaluationId\` varchar(36) NOT NULL, + \`expectedOutput\` LONGTEXT NOT NULL, + \`actualOutput\` LONGTEXT NOT NULL, + \`evaluators\` LONGTEXT, + \`input\` LONGTEXT DEFAULT NULL, + \`metrics\` TEXT DEFAULT NULL, + \`llmEvaluators\` TEXT DEFAULT NULL, + \`runDate\` 
datetime(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6) ON UPDATE CURRENT_TIMESTAMP(6), + PRIMARY KEY (\`id\`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;` + ) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`DROP TABLE evaluation`) + await queryRunner.query(`DROP TABLE evaluation_run`) + } +} diff --git a/packages/server/src/database/migrations/mysql/1714548903384-AddDataset.ts b/packages/server/src/database/migrations/mysql/1714548903384-AddDataset.ts new file mode 100644 index 000000000..e6ca36f94 --- /dev/null +++ b/packages/server/src/database/migrations/mysql/1714548903384-AddDataset.ts @@ -0,0 +1,31 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddDatasets1714548903384 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS \`dataset\` ( + \`id\` varchar(36) NOT NULL, + \`name\` varchar(255) NOT NULL, + \`description\` varchar(255) DEFAULT NULL, + \`createdDate\` datetime(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6), + \`updatedDate\` datetime(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6) ON UPDATE CURRENT_TIMESTAMP(6), + PRIMARY KEY (\`id\`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;` + ) + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS \`dataset_row\` ( + \`id\` varchar(36) NOT NULL, + \`datasetId\` varchar(36) NOT NULL, + \`input\` LONGTEXT NOT NULL, + \`output\` LONGTEXT DEFAULT NULL, + \`updatedDate\` datetime(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6) ON UPDATE CURRENT_TIMESTAMP(6), + PRIMARY KEY (\`id\`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;` + ) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`DROP TABLE dataset`) + await queryRunner.query(`DROP TABLE dataset_row`) + } +} diff --git a/packages/server/src/database/migrations/mysql/1714808591644-AddEvaluator.ts 
b/packages/server/src/database/migrations/mysql/1714808591644-AddEvaluator.ts new file mode 100644 index 000000000..4c9e63dac --- /dev/null +++ b/packages/server/src/database/migrations/mysql/1714808591644-AddEvaluator.ts @@ -0,0 +1,21 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddEvaluator1714808591644 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise<void> { + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS \`evaluator\` ( + \`id\` varchar(36) NOT NULL, + \`name\` varchar(255) NOT NULL, + \`type\` varchar(25) DEFAULT NULL, + \`config\` LONGTEXT DEFAULT NULL, + \`createdDate\` datetime(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6), + \`updatedDate\` datetime(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6) ON UPDATE CURRENT_TIMESTAMP(6), + PRIMARY KEY (\`id\`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;` + ) + } + + public async down(queryRunner: QueryRunner): Promise<void> { + await queryRunner.query(`DROP TABLE evaluator`) + } +} diff --git a/packages/server/src/database/migrations/mysql/1733752119696-AddSeqNoToDatasetRow.ts b/packages/server/src/database/migrations/mysql/1733752119696-AddSeqNoToDatasetRow.ts new file mode 100644 index 000000000..f3d7f72bb --- /dev/null +++ b/packages/server/src/database/migrations/mysql/1733752119696-AddSeqNoToDatasetRow.ts @@ -0,0 +1,12 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddSeqNoToDatasetRow1733752119696 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise<void> { + // await the DDL so the migration is not marked complete before the column exists + const columnExists = await queryRunner.hasColumn('dataset_row', 'sequence_no') + if (!columnExists) await queryRunner.query(`ALTER TABLE \`dataset_row\` ADD COLUMN \`sequence_no\` INT DEFAULT -1;`) + } + + public async down(queryRunner: QueryRunner): Promise<void> { + // MySQL identifiers use backticks (double quotes are invalid under default sql_mode) + await queryRunner.query(`ALTER TABLE \`dataset_row\` DROP COLUMN \`sequence_no\`;`) + } +} diff --git 
a/packages/server/src/database/migrations/mysql/1743758056188-FixOpenSourceAssistantTable.ts b/packages/server/src/database/migrations/mysql/1743758056188-FixOpenSourceAssistantTable.ts new file mode 100644 index 000000000..54138c390 --- /dev/null +++ b/packages/server/src/database/migrations/mysql/1743758056188-FixOpenSourceAssistantTable.ts @@ -0,0 +1,24 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' +import { Assistant } from '../../entities/Assistant' + +export class FixOpenSourceAssistantTable1743758056188 implements MigrationInterface { + name = 'FixOpenSourceAssistantTable1743758056188' + + public async up(queryRunner: QueryRunner): Promise { + const columnExists = await queryRunner.hasColumn('assistant', 'type') + if (!columnExists) { + await queryRunner.query(`ALTER TABLE \`assistant\` ADD COLUMN \`type\` TEXT;`) + await queryRunner.query(`UPDATE \`assistant\` SET \`type\` = 'OPENAI';`) + + const assistants: Assistant[] = await queryRunner.query(`SELECT * FROM \`assistant\`;`) + for (let assistant of assistants) { + const details = JSON.parse(assistant.details) + if (!details?.id) await queryRunner.query(`UPDATE \`assistant\` SET \`type\` = 'CUSTOM' WHERE id = '${assistant.id}';`) + } + } + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE \`assistant\` DROP COLUMN \`type\`;`) + } +} diff --git a/packages/server/src/database/migrations/mysql/1744964560174-AddErrorToEvaluationRun.ts b/packages/server/src/database/migrations/mysql/1744964560174-AddErrorToEvaluationRun.ts new file mode 100644 index 000000000..32fb4218f --- /dev/null +++ b/packages/server/src/database/migrations/mysql/1744964560174-AddErrorToEvaluationRun.ts @@ -0,0 +1,12 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddErrorToEvaluationRun1744964560174 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + const columnExists = await 
queryRunner.hasColumn('evaluation_run', 'errors') + if (!columnExists) await queryRunner.query(`ALTER TABLE \`evaluation_run\` ADD COLUMN \`errors\` LONGTEXT NULL DEFAULT ('[]');`) + } + + public async down(queryRunner: QueryRunner): Promise<void> { + // MySQL identifiers use backticks (double quotes are invalid under default sql_mode) + await queryRunner.query(`ALTER TABLE \`evaluation_run\` DROP COLUMN \`errors\`;`) + } +} diff --git a/packages/server/src/database/migrations/mysql/1746437114935-FixErrorsColumnInEvaluationRun.ts b/packages/server/src/database/migrations/mysql/1746437114935-FixErrorsColumnInEvaluationRun.ts new file mode 100644 index 000000000..d9f80f869 --- /dev/null +++ b/packages/server/src/database/migrations/mysql/1746437114935-FixErrorsColumnInEvaluationRun.ts @@ -0,0 +1,12 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class FixErrorsColumnInEvaluationRun1746437114935 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise<void> { + // await the DDL so the migration is not marked complete before the column exists + const columnExists = await queryRunner.hasColumn('evaluation_run', 'errors') + if (!columnExists) await queryRunner.query(`ALTER TABLE \`evaluation_run\` ADD COLUMN \`errors\` LONGTEXT NULL DEFAULT ('[]');`) + } + + public async down(queryRunner: QueryRunner): Promise<void> { + // MySQL identifiers use backticks (double quotes are invalid under default sql_mode) + await queryRunner.query(`ALTER TABLE \`evaluation_run\` DROP COLUMN \`errors\`;`) + } +} diff --git a/packages/server/src/database/migrations/mysql/1747902489801-ModifyExecutionDataColumnType.ts b/packages/server/src/database/migrations/mysql/1747902489801-ModifyExecutionDataColumnType.ts new file mode 100644 index 000000000..e0afad3ed --- /dev/null +++ b/packages/server/src/database/migrations/mysql/1747902489801-ModifyExecutionDataColumnType.ts @@ -0,0 +1,11 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class ModifyExecutionDataColumnType1747902489801 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise<void> { + await queryRunner.query(`ALTER TABLE \`execution\` MODIFY COLUMN \`executionData\` LONGTEXT NOT NULL;`) + } + + public async down(queryRunner: 
QueryRunner): Promise<void> { + // WARNING: reverting to TEXT may truncate data exceeding 64KB + await queryRunner.query(`ALTER TABLE \`execution\` MODIFY COLUMN \`executionData\` TEXT NOT NULL;`) + } +} diff --git a/packages/server/src/database/migrations/mysql/1754986468397-AddTextToSpeechToChatFlow.ts b/packages/server/src/database/migrations/mysql/1754986468397-AddTextToSpeechToChatFlow.ts new file mode 100644 index 000000000..4466a39e0 --- /dev/null +++ b/packages/server/src/database/migrations/mysql/1754986468397-AddTextToSpeechToChatFlow.ts @@ -0,0 +1,12 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddTextToSpeechToChatFlow1754986468397 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise<void> { + // await the DDL so the migration is not marked complete before the column exists + const columnExists = await queryRunner.hasColumn('chat_flow', 'textToSpeech') + if (!columnExists) await queryRunner.query(`ALTER TABLE \`chat_flow\` ADD COLUMN \`textToSpeech\` TEXT;`) + } + + public async down(queryRunner: QueryRunner): Promise<void> { + await queryRunner.query(`ALTER TABLE \`chat_flow\` DROP COLUMN \`textToSpeech\`;`) + } +} diff --git a/packages/server/src/database/migrations/mysql/1755066758601-ModifyChatflowType.ts b/packages/server/src/database/migrations/mysql/1755066758601-ModifyChatflowType.ts new file mode 100644 index 000000000..a1ff962c4 --- /dev/null +++ b/packages/server/src/database/migrations/mysql/1755066758601-ModifyChatflowType.ts @@ -0,0 +1,15 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' +import { EnumChatflowType } from '../../entities/ChatFlow' + +export class ModifyChatflowType1755066758601 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise<void> { + await queryRunner.query(` + UPDATE \`chat_flow\` SET \`type\` = '${EnumChatflowType.CHATFLOW}' WHERE \`type\` IS NULL OR \`type\` = ''; + `) + await queryRunner.query(` + ALTER TABLE \`chat_flow\` MODIFY COLUMN \`type\` VARCHAR(20) NOT NULL DEFAULT '${EnumChatflowType.CHATFLOW}'; + `) + } + + public async down(): Promise<void> {} +} diff --git 
a/packages/server/src/database/migrations/mysql/1759419216034-AddTextToSpeechToChatFlow.ts b/packages/server/src/database/migrations/mysql/1759419216034-AddTextToSpeechToChatFlow.ts new file mode 100644 index 000000000..35cf555f3 --- /dev/null +++ b/packages/server/src/database/migrations/mysql/1759419216034-AddTextToSpeechToChatFlow.ts @@ -0,0 +1,12 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddTextToSpeechToChatFlow1759419216034 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + const columnExists = await queryRunner.hasColumn('chat_flow', 'textToSpeech') + if (!columnExists) await queryRunner.query(`ALTER TABLE \`chat_flow\` ADD COLUMN \`textToSpeech\` TEXT;`) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE \`chat_flow\` DROP COLUMN \`textToSpeech\`;`) + } +} diff --git a/packages/server/src/database/migrations/mysql/1759424828558-AddChatFlowNameIndex.ts b/packages/server/src/database/migrations/mysql/1759424828558-AddChatFlowNameIndex.ts new file mode 100644 index 000000000..a42782d4c --- /dev/null +++ b/packages/server/src/database/migrations/mysql/1759424828558-AddChatFlowNameIndex.ts @@ -0,0 +1,13 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddChatFlowNameIndex1759424828558 implements MigrationInterface { + name = 'AddChatFlowNameIndex1759424828558' + + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query(`CREATE INDEX \`IDX_chatflow_name\` ON \`chat_flow\` (\`name\`(191))`) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`DROP INDEX \`IDX_chatflow_name\` ON \`chat_flow\``) + } +} diff --git a/packages/server/src/database/migrations/mysql/1765000000000-FixDocumentStoreFileChunkLongText.ts b/packages/server/src/database/migrations/mysql/1765000000000-FixDocumentStoreFileChunkLongText.ts new file mode 100644 index 000000000..130f59745 
--- /dev/null +++ b/packages/server/src/database/migrations/mysql/1765000000000-FixDocumentStoreFileChunkLongText.ts @@ -0,0 +1,14 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class FixDocumentStoreFileChunkLongText1765000000000 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE \`document_store_file_chunk\` MODIFY \`pageContent\` LONGTEXT NOT NULL;`) + await queryRunner.query(`ALTER TABLE \`document_store_file_chunk\` MODIFY \`metadata\` LONGTEXT NULL;`) + } + + public async down(queryRunner: QueryRunner): Promise { + // WARNING: Reverting to TEXT may cause data loss if content exceeds the 64KB limit. + await queryRunner.query(`ALTER TABLE \`document_store_file_chunk\` MODIFY \`pageContent\` TEXT NOT NULL;`) + await queryRunner.query(`ALTER TABLE \`document_store_file_chunk\` MODIFY \`metadata\` TEXT NULL;`) + } +} diff --git a/packages/server/src/database/migrations/mysql/index.ts b/packages/server/src/database/migrations/mysql/index.ts index fcd054131..c7f5d2eba 100644 --- a/packages/server/src/database/migrations/mysql/index.ts +++ b/packages/server/src/database/migrations/mysql/index.ts @@ -17,9 +17,12 @@ import { AddFeedback1707213626553 } from './1707213626553-AddFeedback' import { AddUpsertHistoryEntity1709814301358 } from './1709814301358-AddUpsertHistoryEntity' import { AddLead1710832127079 } from './1710832127079-AddLead' import { AddLeadToChatMessage1711538023578 } from './1711538023578-AddLeadToChatMessage' -import { AddVectorStoreConfigToDocStore1715861032479 } from './1715861032479-AddVectorStoreConfigToDocStore' import { AddDocumentStore1711637331047 } from './1711637331047-AddDocumentStore' +import { AddEvaluation1714548873039 } from './1714548873039-AddEvaluation' +import { AddDatasets1714548903384 } from './1714548903384-AddDataset' import { AddAgentReasoningToChatMessage1714679514451 } from './1714679514451-AddAgentReasoningToChatMessage' 
+import { AddEvaluator1714808591644 } from './1714808591644-AddEvaluator' +import { AddVectorStoreConfigToDocStore1715861032479 } from './1715861032479-AddVectorStoreConfigToDocStore' import { AddTypeToChatFlow1716300000000 } from './1716300000000-AddTypeToChatFlow' import { AddApiKey1720230151480 } from './1720230151480-AddApiKey' import { AddActionToChatMessage1721078251523 } from './1721078251523-AddActionToChatMessage' @@ -28,7 +31,29 @@ import { AddCustomTemplate1725629836652 } from './1725629836652-AddCustomTemplat import { AddArtifactsToChatMessage1726156258465 } from './1726156258465-AddArtifactsToChatMessage' import { AddFollowUpPrompts1726666302024 } from './1726666302024-AddFollowUpPrompts' import { AddTypeToAssistant1733011290987 } from './1733011290987-AddTypeToAssistant' +import { AddSeqNoToDatasetRow1733752119696 } from './1733752119696-AddSeqNoToDatasetRow' import { AddExecutionEntity1738090872625 } from './1738090872625-AddExecutionEntity' +import { FixOpenSourceAssistantTable1743758056188 } from './1743758056188-FixOpenSourceAssistantTable' +import { AddErrorToEvaluationRun1744964560174 } from './1744964560174-AddErrorToEvaluationRun' +import { FixErrorsColumnInEvaluationRun1746437114935 } from './1746437114935-FixErrorsColumnInEvaluationRun' +import { ModifyExecutionDataColumnType1747902489801 } from './1747902489801-ModifyExecutionDataColumnType' +import { AddTextToSpeechToChatFlow1754986468397 } from './1754986468397-AddTextToSpeechToChatFlow' +import { ModifyChatflowType1755066758601 } from './1755066758601-ModifyChatflowType' +import { AddTextToSpeechToChatFlow1759419216034 } from './1759419216034-AddTextToSpeechToChatFlow' +import { AddChatFlowNameIndex1759424828558 } from './1759424828558-AddChatFlowNameIndex' +import { FixDocumentStoreFileChunkLongText1765000000000 } from './1765000000000-FixDocumentStoreFileChunkLongText' + +import { AddAuthTables1720230151482 } from 
'../../../enterprise/database/migrations/mysql/1720230151482-AddAuthTables' +import { AddWorkspace1720230151484 } from '../../../enterprise/database/migrations/mysql/1720230151484-AddWorkspace' +import { AddWorkspaceShared1726654922034 } from '../../../enterprise/database/migrations/mysql/1726654922034-AddWorkspaceShared' +import { AddWorkspaceIdToCustomTemplate1726655750383 } from '../../../enterprise/database/migrations/mysql/1726655750383-AddWorkspaceIdToCustomTemplate' +import { AddOrganization1727798417345 } from '../../../enterprise/database/migrations/mysql/1727798417345-AddOrganization' +import { LinkWorkspaceId1729130948686 } from '../../../enterprise/database/migrations/mysql/1729130948686-LinkWorkspaceId' +import { LinkOrganizationId1729133111652 } from '../../../enterprise/database/migrations/mysql/1729133111652-LinkOrganizationId' +import { AddSSOColumns1730519457880 } from '../../../enterprise/database/migrations/mysql/1730519457880-AddSSOColumns' +import { AddPersonalWorkspace1734074497540 } from '../../../enterprise/database/migrations/mysql/1734074497540-AddPersonalWorkspace' +import { RefactorEnterpriseDatabase1737076223692 } from '../../../enterprise/database/migrations/mysql/1737076223692-RefactorEnterpriseDatabase' +import { ExecutionLinkWorkspaceId1746862866554 } from '../../../enterprise/database/migrations/mysql/1746862866554-ExecutionLinkWorkspaceId' export const mysqlMigrations = [ Init1693840429259, @@ -48,12 +73,15 @@ export const mysqlMigrations = [ AddSpeechToText1706364937060, AddUpsertHistoryEntity1709814301358, AddFeedback1707213626553, + AddEvaluation1714548873039, + AddDatasets1714548903384, + AddEvaluator1714808591644, AddDocumentStore1711637331047, AddLead1710832127079, AddLeadToChatMessage1711538023578, AddAgentReasoningToChatMessage1714679514451, - AddTypeToChatFlow1716300000000, AddVectorStoreConfigToDocStore1715861032479, + AddTypeToChatFlow1716300000000, AddApiKey1720230151480, AddActionToChatMessage1721078251523, 
LongTextColumn1722301395521, @@ -61,5 +89,26 @@ export const mysqlMigrations = [ AddArtifactsToChatMessage1726156258465, AddFollowUpPrompts1726666302024, AddTypeToAssistant1733011290987, - AddExecutionEntity1738090872625 + AddAuthTables1720230151482, + AddWorkspace1720230151484, + AddWorkspaceShared1726654922034, + AddWorkspaceIdToCustomTemplate1726655750383, + AddOrganization1727798417345, + LinkWorkspaceId1729130948686, + LinkOrganizationId1729133111652, + AddSSOColumns1730519457880, + AddSeqNoToDatasetRow1733752119696, + AddPersonalWorkspace1734074497540, + RefactorEnterpriseDatabase1737076223692, + FixOpenSourceAssistantTable1743758056188, + AddExecutionEntity1738090872625, + AddErrorToEvaluationRun1744964560174, + FixErrorsColumnInEvaluationRun1746437114935, + ExecutionLinkWorkspaceId1746862866554, + ModifyExecutionDataColumnType1747902489801, + AddTextToSpeechToChatFlow1754986468397, + ModifyChatflowType1755066758601, + AddTextToSpeechToChatFlow1759419216034, + AddChatFlowNameIndex1759424828558, + FixDocumentStoreFileChunkLongText1765000000000 ] diff --git a/packages/server/src/database/migrations/postgres/1714548873039-AddEvaluation.ts b/packages/server/src/database/migrations/postgres/1714548873039-AddEvaluation.ts new file mode 100644 index 000000000..7a6a6aa06 --- /dev/null +++ b/packages/server/src/database/migrations/postgres/1714548873039-AddEvaluation.ts @@ -0,0 +1,41 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddEvaluation1714548873039 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS evaluation ( + id uuid NOT NULL DEFAULT uuid_generate_v4(), + "name" varchar NOT NULL, + "chatflowId" text NOT NULL, + "chatflowName" text NOT NULL, + "datasetId" varchar NOT NULL, + "datasetName" varchar NOT NULL, + "additionalConfig" text NULL, + "evaluationType" varchar NOT NULL, + "status" varchar NOT NULL, + "average_metrics" text NULL, + 
"runDate" timestamp NOT NULL DEFAULT now(), + CONSTRAINT "PK_98989043dd804f54-9830ab99f8" PRIMARY KEY (id) + );` + ) + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS evaluation_run ( + id uuid NOT NULL DEFAULT uuid_generate_v4(), + "evaluationId" varchar NOT NULL, + "input" text NOT NULL, + "expectedOutput" text NULL, + "actualOutput" text NULL, + "evaluators" text NULL, + "llmEvaluators" text DEFAULT NULL, + "metrics" text NULL, + "runDate" timestamp NOT NULL DEFAULT now(), + CONSTRAINT "PK_98989927dd804f54-9840ab23f8" PRIMARY KEY (id) + );` + ) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`DROP TABLE evaluation`) + await queryRunner.query(`DROP TABLE evaluation_run`) + } +} diff --git a/packages/server/src/database/migrations/postgres/1714548903384-AddDataset.ts b/packages/server/src/database/migrations/postgres/1714548903384-AddDataset.ts new file mode 100644 index 000000000..0fadef306 --- /dev/null +++ b/packages/server/src/database/migrations/postgres/1714548903384-AddDataset.ts @@ -0,0 +1,31 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddDatasets1714548903384 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS dataset ( + id uuid NOT NULL DEFAULT uuid_generate_v4(), + "name" varchar NOT NULL, + "description" varchar NULL, + "createdDate" timestamp NOT NULL DEFAULT now(), + "updatedDate" timestamp NOT NULL DEFAULT now(), + CONSTRAINT "PK_98419043dd804f54-9830ab99f8" PRIMARY KEY (id) + );` + ) + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS dataset_row ( + id uuid NOT NULL DEFAULT uuid_generate_v4(), + "datasetId" varchar NOT NULL, + "input" text NOT NULL, + "output" text NULL, + "updatedDate" timestamp NOT NULL DEFAULT now(), + CONSTRAINT "PK_98909027dd804f54-9840ab99f8" PRIMARY KEY (id) + );` + ) + } + + public async down(queryRunner: QueryRunner): Promise { + await 
queryRunner.query(`DROP TABLE dataset`) + await queryRunner.query(`DROP TABLE dataset_row`) + } +} diff --git a/packages/server/src/database/migrations/postgres/1714808591644-AddEvaluator.ts b/packages/server/src/database/migrations/postgres/1714808591644-AddEvaluator.ts new file mode 100644 index 000000000..a228e0c8f --- /dev/null +++ b/packages/server/src/database/migrations/postgres/1714808591644-AddEvaluator.ts @@ -0,0 +1,21 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddEvaluator1714808591644 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS evaluator ( + id uuid NOT NULL DEFAULT uuid_generate_v4(), + "name" varchar NOT NULL, + "type" text NULL, + "config" text NULL, + "createdDate" timestamp NOT NULL DEFAULT now(), + "updatedDate" timestamp NOT NULL DEFAULT now(), + CONSTRAINT "PK_90019043dd804f54-9830ab11f8" PRIMARY KEY (id) + );` + ) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`DROP TABLE evaluator`) + } +} diff --git a/packages/server/src/database/migrations/postgres/1733752119696-AddSeqNoToDatasetRow.ts b/packages/server/src/database/migrations/postgres/1733752119696-AddSeqNoToDatasetRow.ts new file mode 100644 index 000000000..a49a68816 --- /dev/null +++ b/packages/server/src/database/migrations/postgres/1733752119696-AddSeqNoToDatasetRow.ts @@ -0,0 +1,11 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddSeqNoToDatasetRow1733752119696 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE "dataset_row" ADD COLUMN IF NOT EXISTS "sequence_no" integer DEFAULT -1;`) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE "dataset_row" DROP COLUMN "sequence_no";`) + } +} diff --git 
a/packages/server/src/database/migrations/postgres/1743758056188-FixOpenSourceAssistantTable.ts b/packages/server/src/database/migrations/postgres/1743758056188-FixOpenSourceAssistantTable.ts new file mode 100644 index 000000000..5400a1907 --- /dev/null +++ b/packages/server/src/database/migrations/postgres/1743758056188-FixOpenSourceAssistantTable.ts @@ -0,0 +1,24 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' +import { Assistant } from '../../entities/Assistant' + +export class FixOpenSourceAssistantTable1743758056188 implements MigrationInterface { + name = 'FixOpenSourceAssistantTable1743758056188' + + public async up(queryRunner: QueryRunner): Promise { + const columnExists = await queryRunner.hasColumn('assistant', 'type') + if (!columnExists) { + await queryRunner.query(`ALTER TABLE "assistant" ADD COLUMN "type" TEXT;`) + await queryRunner.query(`UPDATE "assistant" SET "type" = 'OPENAI';`) + + const assistants: Assistant[] = await queryRunner.query(`SELECT * FROM "assistant";`) + for (let assistant of assistants) { + const details = JSON.parse(assistant.details) + if (!details?.id) await queryRunner.query(`UPDATE "assistant" SET "type" = 'CUSTOM' WHERE id = '${assistant.id}';`) + } + } + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE "assistant" DROP COLUMN "type";`) + } +} diff --git a/packages/server/src/database/migrations/postgres/1744964560174-AddErrorToEvaluationRun.ts b/packages/server/src/database/migrations/postgres/1744964560174-AddErrorToEvaluationRun.ts new file mode 100644 index 000000000..9cb47a57e --- /dev/null +++ b/packages/server/src/database/migrations/postgres/1744964560174-AddErrorToEvaluationRun.ts @@ -0,0 +1,11 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddErrorToEvaluationRun1744964560174 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE "evaluation_run" ADD 
COLUMN IF NOT EXISTS "errors" TEXT NULL DEFAULT '[]';`) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE "evaluation_run" DROP COLUMN "errors";`) + } +} diff --git a/packages/server/src/database/migrations/postgres/1748450230238-ModifyExecutionSessionIdFieldType.ts b/packages/server/src/database/migrations/postgres/1748450230238-ModifyExecutionSessionIdFieldType.ts new file mode 100644 index 000000000..43656ff72 --- /dev/null +++ b/packages/server/src/database/migrations/postgres/1748450230238-ModifyExecutionSessionIdFieldType.ts @@ -0,0 +1,13 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class ModifyExecutionSessionIdFieldType1748450230238 implements MigrationInterface { + name = 'ModifyExecutionSessionIdFieldType1748450230238' + + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE "execution" ALTER COLUMN "sessionId" type varchar USING "sessionId"::varchar`) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE "execution" ALTER COLUMN "sessionId" type uuid USING "sessionId"::uuid`) + } +} diff --git a/packages/server/src/database/migrations/postgres/1754986480347-AddTextToSpeechToChatFlow.ts b/packages/server/src/database/migrations/postgres/1754986480347-AddTextToSpeechToChatFlow.ts new file mode 100644 index 000000000..67482ce78 --- /dev/null +++ b/packages/server/src/database/migrations/postgres/1754986480347-AddTextToSpeechToChatFlow.ts @@ -0,0 +1,11 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddTextToSpeechToChatFlow1754986480347 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE "chat_flow" ADD COLUMN IF NOT EXISTS "textToSpeech" TEXT;`) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE "chat_flow" DROP COLUMN "textToSpeech";`) + } 
+} diff --git a/packages/server/src/database/migrations/postgres/1755066758601-ModifyChatflowType.ts b/packages/server/src/database/migrations/postgres/1755066758601-ModifyChatflowType.ts new file mode 100644 index 000000000..02c2c1250 --- /dev/null +++ b/packages/server/src/database/migrations/postgres/1755066758601-ModifyChatflowType.ts @@ -0,0 +1,21 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' +import { EnumChatflowType } from '../../entities/ChatFlow' + +export class ModifyChatflowType1755066758601 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query(` + UPDATE "chat_flow" SET "type" = '${EnumChatflowType.CHATFLOW}' WHERE "type" IS NULL OR "type" = ''; + `) + await queryRunner.query(` + ALTER TABLE "chat_flow" ALTER COLUMN "type" SET DEFAULT '${EnumChatflowType.CHATFLOW}'; + `) + await queryRunner.query(` + ALTER TABLE "chat_flow" ALTER COLUMN "type" TYPE VARCHAR(20); + `) + await queryRunner.query(` + ALTER TABLE "chat_flow" ALTER COLUMN "type" SET NOT NULL; + `) + } + + public async down(): Promise {} +} diff --git a/packages/server/src/database/migrations/postgres/1759419194331-AddTextToSpeechToChatFlow.ts b/packages/server/src/database/migrations/postgres/1759419194331-AddTextToSpeechToChatFlow.ts new file mode 100644 index 000000000..510a96b7c --- /dev/null +++ b/packages/server/src/database/migrations/postgres/1759419194331-AddTextToSpeechToChatFlow.ts @@ -0,0 +1,11 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddTextToSpeechToChatFlow1759419194331 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE "chat_flow" ADD COLUMN IF NOT EXISTS "textToSpeech" TEXT;`) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE "chat_flow" DROP COLUMN "textToSpeech";`) + } +} diff --git 
a/packages/server/src/database/migrations/postgres/1759424903973-AddChatFlowNameIndex.ts b/packages/server/src/database/migrations/postgres/1759424903973-AddChatFlowNameIndex.ts new file mode 100644 index 000000000..c3da8c4e5 --- /dev/null +++ b/packages/server/src/database/migrations/postgres/1759424903973-AddChatFlowNameIndex.ts @@ -0,0 +1,13 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddChatFlowNameIndex1759424903973 implements MigrationInterface { + name = 'AddChatFlowNameIndex1759424903973' + + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query(`CREATE INDEX IF NOT EXISTS "IDX_chatflow_name" ON "chat_flow" (substring("name" from 1 for 255))`) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`DROP INDEX IF EXISTS "IDX_chatflow_name"`) + } +} diff --git a/packages/server/src/database/migrations/postgres/index.ts b/packages/server/src/database/migrations/postgres/index.ts index ac8fef734..3dbca6147 100644 --- a/packages/server/src/database/migrations/postgres/index.ts +++ b/packages/server/src/database/migrations/postgres/index.ts @@ -18,9 +18,12 @@ import { AddUpsertHistoryEntity1709814301358 } from './1709814301358-AddUpsertHi import { FieldTypes1710497452584 } from './1710497452584-FieldTypes' import { AddLead1710832137905 } from './1710832137905-AddLead' import { AddLeadToChatMessage1711538016098 } from './1711538016098-AddLeadToChatMessage' -import { AddVectorStoreConfigToDocStore1715861032479 } from './1715861032479-AddVectorStoreConfigToDocStore' import { AddDocumentStore1711637331047 } from './1711637331047-AddDocumentStore' +import { AddEvaluation1714548873039 } from './1714548873039-AddEvaluation' +import { AddDatasets1714548903384 } from './1714548903384-AddDataset' import { AddAgentReasoningToChatMessage1714679514451 } from './1714679514451-AddAgentReasoningToChatMessage' +import { AddEvaluator1714808591644 } from './1714808591644-AddEvaluator' 
+import { AddVectorStoreConfigToDocStore1715861032479 } from './1715861032479-AddVectorStoreConfigToDocStore' import { AddTypeToChatFlow1716300000000 } from './1716300000000-AddTypeToChatFlow' import { AddApiKey1720230151480 } from './1720230151480-AddApiKey' import { AddActionToChatMessage1721078251523 } from './1721078251523-AddActionToChatMessage' @@ -28,7 +31,27 @@ import { AddCustomTemplate1725629836652 } from './1725629836652-AddCustomTemplat import { AddArtifactsToChatMessage1726156258465 } from './1726156258465-AddArtifactsToChatMessage' import { AddFollowUpPrompts1726666309552 } from './1726666309552-AddFollowUpPrompts' import { AddTypeToAssistant1733011290987 } from './1733011290987-AddTypeToAssistant' +import { AddSeqNoToDatasetRow1733752119696 } from './1733752119696-AddSeqNoToDatasetRow' import { AddExecutionEntity1738090872625 } from './1738090872625-AddExecutionEntity' +import { FixOpenSourceAssistantTable1743758056188 } from './1743758056188-FixOpenSourceAssistantTable' +import { AddErrorToEvaluationRun1744964560174 } from './1744964560174-AddErrorToEvaluationRun' +import { ModifyExecutionSessionIdFieldType1748450230238 } from './1748450230238-ModifyExecutionSessionIdFieldType' +import { AddTextToSpeechToChatFlow1754986480347 } from './1754986480347-AddTextToSpeechToChatFlow' +import { ModifyChatflowType1755066758601 } from './1755066758601-ModifyChatflowType' +import { AddTextToSpeechToChatFlow1759419194331 } from './1759419194331-AddTextToSpeechToChatFlow' +import { AddChatFlowNameIndex1759424903973 } from './1759424903973-AddChatFlowNameIndex' + +import { AddAuthTables1720230151482 } from '../../../enterprise/database/migrations/postgres/1720230151482-AddAuthTables' +import { AddWorkspace1720230151484 } from '../../../enterprise/database/migrations/postgres/1720230151484-AddWorkspace' +import { AddWorkspaceShared1726654922034 } from '../../../enterprise/database/migrations/postgres/1726654922034-AddWorkspaceShared' +import { 
AddWorkspaceIdToCustomTemplate1726655750383 } from '../../../enterprise/database/migrations/postgres/1726655750383-AddWorkspaceIdToCustomTemplate' +import { AddOrganization1727798417345 } from '../../../enterprise/database/migrations/postgres/1727798417345-AddOrganization' +import { LinkWorkspaceId1729130948686 } from '../../../enterprise/database/migrations/postgres/1729130948686-LinkWorkspaceId' +import { LinkOrganizationId1729133111652 } from '../../../enterprise/database/migrations/postgres/1729133111652-LinkOrganizationId' +import { AddSSOColumns1730519457880 } from '../../../enterprise/database/migrations/postgres/1730519457880-AddSSOColumns' +import { AddPersonalWorkspace1734074497540 } from '../../../enterprise/database/migrations/postgres/1734074497540-AddPersonalWorkspace' +import { RefactorEnterpriseDatabase1737076223692 } from '../../../enterprise/database/migrations/postgres/1737076223692-RefactorEnterpriseDatabase' +import { ExecutionLinkWorkspaceId1746862866554 } from '../../../enterprise/database/migrations/postgres/1746862866554-ExecutionLinkWorkspaceId' export const postgresMigrations = [ Init1693891895163, @@ -49,17 +72,39 @@ export const postgresMigrations = [ AddUpsertHistoryEntity1709814301358, AddFeedback1707213601923, FieldTypes1710497452584, + AddEvaluation1714548873039, + AddDatasets1714548903384, + AddEvaluator1714808591644, AddDocumentStore1711637331047, AddLead1710832137905, AddLeadToChatMessage1711538016098, AddAgentReasoningToChatMessage1714679514451, - AddTypeToChatFlow1716300000000, AddVectorStoreConfigToDocStore1715861032479, + AddTypeToChatFlow1716300000000, AddApiKey1720230151480, AddActionToChatMessage1721078251523, AddCustomTemplate1725629836652, AddArtifactsToChatMessage1726156258465, AddFollowUpPrompts1726666309552, AddTypeToAssistant1733011290987, - AddExecutionEntity1738090872625 + AddAuthTables1720230151482, + AddWorkspace1720230151484, + AddWorkspaceShared1726654922034, + AddWorkspaceIdToCustomTemplate1726655750383, + 
AddOrganization1727798417345, + LinkWorkspaceId1729130948686, + LinkOrganizationId1729133111652, + AddSSOColumns1730519457880, + AddSeqNoToDatasetRow1733752119696, + AddPersonalWorkspace1734074497540, + RefactorEnterpriseDatabase1737076223692, + AddExecutionEntity1738090872625, + FixOpenSourceAssistantTable1743758056188, + AddErrorToEvaluationRun1744964560174, + ExecutionLinkWorkspaceId1746862866554, + ModifyExecutionSessionIdFieldType1748450230238, + AddTextToSpeechToChatFlow1754986480347, + ModifyChatflowType1755066758601, + AddTextToSpeechToChatFlow1759419194331, + AddChatFlowNameIndex1759424903973 ] diff --git a/packages/server/src/database/migrations/sqlite/1714548873039-AddEvaluation.ts b/packages/server/src/database/migrations/sqlite/1714548873039-AddEvaluation.ts new file mode 100644 index 000000000..9b1da5eba --- /dev/null +++ b/packages/server/src/database/migrations/sqlite/1714548873039-AddEvaluation.ts @@ -0,0 +1,37 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddEvaluation1714548873039 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS "evaluation" ( + "id" varchar PRIMARY KEY NOT NULL, + "name" varchar NOT NULL, + "chatflowId" text NOT NULL, + "chatflowName" text NOT NULL, + "datasetId" varchar NOT NULL, + "datasetName" varchar NOT NULL, + "additionalConfig" text, + "status" varchar NOT NULL, + "evaluationType" varchar, + "average_metrics" text, + "runDate" datetime NOT NULL DEFAULT (datetime('now')));` + ) + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS "evaluation_run" ( + "id" varchar PRIMARY KEY NOT NULL, + "evaluationId" text NOT NULL, + "input" text NOT NULL, + "expectedOutput" text NOT NULL, + "actualOutput" text NOT NULL, + "evaluators" text, + "llmEvaluators" TEXT DEFAULT NULL, + "metrics" text NULL, + "runDate" datetime NOT NULL DEFAULT (datetime('now')));` + ) + } + + public async down(queryRunner: 
QueryRunner): Promise { + await queryRunner.query(`DROP TABLE evaluation`) + await queryRunner.query(`DROP TABLE evaluation_run`) + } +} diff --git a/packages/server/src/database/migrations/sqlite/1714548903384-AddDataset.ts b/packages/server/src/database/migrations/sqlite/1714548903384-AddDataset.ts new file mode 100644 index 000000000..b59abea14 --- /dev/null +++ b/packages/server/src/database/migrations/sqlite/1714548903384-AddDataset.ts @@ -0,0 +1,25 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddDatasets1714548903384 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS "dataset" ("id" varchar PRIMARY KEY NOT NULL, + "name" text NOT NULL, + "description" varchar, + "createdDate" datetime NOT NULL DEFAULT (datetime('now')), + "updatedDate" datetime NOT NULL DEFAULT (datetime('now')));` + ) + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS "dataset_row" ("id" varchar PRIMARY KEY NOT NULL, + "datasetId" text NOT NULL, + "input" text NOT NULL, + "output" text NOT NULL, + "updatedDate" datetime NOT NULL DEFAULT (datetime('now')));` + ) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`DROP TABLE dataset`) + await queryRunner.query(`DROP TABLE dataset_row`) + } +} diff --git a/packages/server/src/database/migrations/sqlite/1714808591644-AddEvaluator.ts b/packages/server/src/database/migrations/sqlite/1714808591644-AddEvaluator.ts new file mode 100644 index 000000000..af85cf271 --- /dev/null +++ b/packages/server/src/database/migrations/sqlite/1714808591644-AddEvaluator.ts @@ -0,0 +1,18 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddEvaluator1714808591644 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS "evaluator" ("id" varchar PRIMARY KEY NOT NULL, +"name" text NOT NULL, 
+"type" varchar, +"config" text, +"createdDate" datetime NOT NULL DEFAULT (datetime('now')), +"updatedDate" datetime NOT NULL DEFAULT (datetime('now')));` + ) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`DROP TABLE evaluator`) + } +} diff --git a/packages/server/src/database/migrations/sqlite/1733752119696-AddSeqNoToDatasetRow.ts b/packages/server/src/database/migrations/sqlite/1733752119696-AddSeqNoToDatasetRow.ts new file mode 100644 index 000000000..8d1cb3560 --- /dev/null +++ b/packages/server/src/database/migrations/sqlite/1733752119696-AddSeqNoToDatasetRow.ts @@ -0,0 +1,11 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddSeqNoToDatasetRow1733752119696 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE "dataset_row" ADD COLUMN "sequence_no" integer DEFAULT -1;`) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE "dataset_row" DROP COLUMN "sequence_no";`) + } +} diff --git a/packages/server/src/database/migrations/sqlite/1743758056188-FixOpenSourceAssistantTable.ts b/packages/server/src/database/migrations/sqlite/1743758056188-FixOpenSourceAssistantTable.ts new file mode 100644 index 000000000..61c611e2e --- /dev/null +++ b/packages/server/src/database/migrations/sqlite/1743758056188-FixOpenSourceAssistantTable.ts @@ -0,0 +1,28 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' +import { Assistant } from '../../entities/Assistant' + +export async function fixOpenSourceAssistantTable(queryRunner: QueryRunner): Promise { + const columnExists = await queryRunner.hasColumn('assistant', 'type') + if (!columnExists) { + await queryRunner.query(`ALTER TABLE "assistant" ADD COLUMN "type" TEXT;`) + await queryRunner.query(`UPDATE "assistant" SET "type" = 'OPENAI';`) + + const assistants: Assistant[] = await queryRunner.query(`SELECT * FROM "assistant";`) + for 
(let assistant of assistants) { + const details = JSON.parse(assistant.details) + if (!details?.id) await queryRunner.query(`UPDATE "assistant" SET "type" = 'CUSTOM' WHERE id = '${assistant.id}';`) + } + } +} + +export class FixOpenSourceAssistantTable1743758056188 implements MigrationInterface { + name = 'FixOpenSourceAssistantTable1743758056188' + + public async up(queryRunner: QueryRunner): Promise { + await fixOpenSourceAssistantTable(queryRunner) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE "assistant" DROP COLUMN "type";`) + } +} diff --git a/packages/server/src/database/migrations/sqlite/1744964560174-AddErrorToEvaluationRun.ts b/packages/server/src/database/migrations/sqlite/1744964560174-AddErrorToEvaluationRun.ts new file mode 100644 index 000000000..8f576aa1d --- /dev/null +++ b/packages/server/src/database/migrations/sqlite/1744964560174-AddErrorToEvaluationRun.ts @@ -0,0 +1,11 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddErrorToEvaluationRun1744964560174 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE "evaluation_run" ADD COLUMN "errors" TEXT NULL DEFAULT '[]';`) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE "evaluation_run" DROP COLUMN "errors";`) + } +} diff --git a/packages/server/src/database/migrations/sqlite/1754986486669-AddTextToSpeechToChatFlow.ts b/packages/server/src/database/migrations/sqlite/1754986486669-AddTextToSpeechToChatFlow.ts new file mode 100644 index 000000000..b37d9f0d9 --- /dev/null +++ b/packages/server/src/database/migrations/sqlite/1754986486669-AddTextToSpeechToChatFlow.ts @@ -0,0 +1,11 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddTextToSpeechToChatFlow1754986486669 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await 
queryRunner.query(`ALTER TABLE "chat_flow" ADD COLUMN "textToSpeech" TEXT;`) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE "chat_flow" DROP COLUMN "textToSpeech";`) + } +} diff --git a/packages/server/src/database/migrations/sqlite/1755066758601-ModifyChatflowType.ts b/packages/server/src/database/migrations/sqlite/1755066758601-ModifyChatflowType.ts new file mode 100644 index 000000000..9af5602e7 --- /dev/null +++ b/packages/server/src/database/migrations/sqlite/1755066758601-ModifyChatflowType.ts @@ -0,0 +1,40 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' +import { EnumChatflowType } from '../../entities/ChatFlow' + +export class ModifyChatflowType1755066758601 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query(` + CREATE TABLE "temp_chat_flow" ( + "id" varchar PRIMARY KEY NOT NULL, + "name" varchar NOT NULL, + "flowData" text NOT NULL, + "deployed" boolean, + "isPublic" boolean, + "apikeyid" varchar, + "chatbotConfig" text, + "createdDate" datetime NOT NULL DEFAULT (datetime('now')), + "updatedDate" datetime NOT NULL DEFAULT (datetime('now')), + "apiConfig" TEXT, + "analytic" TEXT, + "category" TEXT, + "speechToText" TEXT, + "type" VARCHAR(20) NOT NULL DEFAULT '${EnumChatflowType.CHATFLOW}', + "workspaceId" TEXT, + "followUpPrompts" TEXT, + FOREIGN KEY ("workspaceId") REFERENCES "workspace"("id") + ); + `) + + await queryRunner.query(` + INSERT INTO "temp_chat_flow" ("id", "name", "flowData", "deployed", "isPublic", "apikeyid", "chatbotConfig", "createdDate", "updatedDate", "apiConfig", "analytic", "category", "speechToText", "type", "workspaceId", "followUpPrompts") + SELECT "id", "name", "flowData", "deployed", "isPublic", "apikeyid", "chatbotConfig", "createdDate", "updatedDate", "apiConfig", "analytic", "category", "speechToText", + CASE WHEN "type" IS NULL OR "type" = '' THEN '${EnumChatflowType.CHATFLOW}' ELSE "type" END, 
"workspaceId", "followUpPrompts" FROM "chat_flow"; + `) + + await queryRunner.query(`DROP TABLE "chat_flow";`) + + await queryRunner.query(`ALTER TABLE "temp_chat_flow" RENAME TO "chat_flow";`) + } + + public async down(): Promise {} +} diff --git a/packages/server/src/database/migrations/sqlite/1759419136055-AddTextToSpeechToChatFlow.ts b/packages/server/src/database/migrations/sqlite/1759419136055-AddTextToSpeechToChatFlow.ts new file mode 100644 index 000000000..382253819 --- /dev/null +++ b/packages/server/src/database/migrations/sqlite/1759419136055-AddTextToSpeechToChatFlow.ts @@ -0,0 +1,22 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddTextToSpeechToChatFlow1759419136055 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + const tableInfo = await queryRunner.query(`PRAGMA table_info("chat_flow");`) + const columnExists = tableInfo.some((column: any) => column.name === 'textToSpeech') + if (!columnExists) { + await queryRunner.query(`ALTER TABLE "chat_flow" ADD COLUMN "textToSpeech" TEXT;`) + } + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`CREATE TABLE "chat_flow_temp" AS SELECT * FROM "chat_flow" WHERE 1=0;`) + await queryRunner.query(` + INSERT INTO "chat_flow_temp" + SELECT id, name, flowData, deployed, isPublic, apikeyid, chatbotConfig, apiConfig, analytic, speechToText, followUpPrompts, category, type, createdDate, updatedDate, workspaceId + FROM "chat_flow"; + `) + await queryRunner.query(`DROP TABLE "chat_flow";`) + await queryRunner.query(`ALTER TABLE "chat_flow_temp" RENAME TO "chat_flow";`) + } +} diff --git a/packages/server/src/database/migrations/sqlite/1759424923093-AddChatFlowNameIndex.ts b/packages/server/src/database/migrations/sqlite/1759424923093-AddChatFlowNameIndex.ts new file mode 100644 index 000000000..436707ca0 --- /dev/null +++ b/packages/server/src/database/migrations/sqlite/1759424923093-AddChatFlowNameIndex.ts @@ -0,0 
+1,13 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddChatFlowNameIndex1759424923093 implements MigrationInterface { + name = 'AddChatFlowNameIndex1759424923093' + + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query(`CREATE INDEX IF NOT EXISTS "IDX_chatflow_name" ON "chat_flow" (substr(name, 1, 255))`) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`DROP INDEX IF EXISTS "IDX_chatflow_name"`) + } +} diff --git a/packages/server/src/database/migrations/sqlite/index.ts b/packages/server/src/database/migrations/sqlite/index.ts index 4ebcbb921..cbed0760c 100644 --- a/packages/server/src/database/migrations/sqlite/index.ts +++ b/packages/server/src/database/migrations/sqlite/index.ts @@ -17,17 +17,39 @@ import { AddFeedback1707213619308 } from './1707213619308-AddFeedback' import { AddUpsertHistoryEntity1709814301358 } from './1709814301358-AddUpsertHistoryEntity' import { AddLead1710832117612 } from './1710832117612-AddLead' import { AddLeadToChatMessage1711537986113 } from './1711537986113-AddLeadToChatMessage' -import { AddVectorStoreConfigToDocStore1715861032479 } from './1715861032479-AddVectorStoreConfigToDocStore' import { AddDocumentStore1711637331047 } from './1711637331047-AddDocumentStore' +import { AddEvaluation1714548873039 } from './1714548873039-AddEvaluation' +import { AddDatasets1714548903384 } from './1714548903384-AddDataset' import { AddAgentReasoningToChatMessage1714679514451 } from './1714679514451-AddAgentReasoningToChatMessage' +import { AddEvaluator1714808591644 } from './1714808591644-AddEvaluator' +import { AddVectorStoreConfigToDocStore1715861032479 } from './1715861032479-AddVectorStoreConfigToDocStore' import { AddTypeToChatFlow1716300000000 } from './1716300000000-AddTypeToChatFlow' import { AddApiKey1720230151480 } from './1720230151480-AddApiKey' import { AddActionToChatMessage1721078251523 } from 
'./1721078251523-AddActionToChatMessage' -import { AddArtifactsToChatMessage1726156258465 } from './1726156258465-AddArtifactsToChatMessage' import { AddCustomTemplate1725629836652 } from './1725629836652-AddCustomTemplate' +import { AddArtifactsToChatMessage1726156258465 } from './1726156258465-AddArtifactsToChatMessage' import { AddFollowUpPrompts1726666294213 } from './1726666294213-AddFollowUpPrompts' import { AddTypeToAssistant1733011290987 } from './1733011290987-AddTypeToAssistant' +import { AddSeqNoToDatasetRow1733752119696 } from './1733752119696-AddSeqNoToDatasetRow' import { AddExecutionEntity1738090872625 } from './1738090872625-AddExecutionEntity' +import { FixOpenSourceAssistantTable1743758056188 } from './1743758056188-FixOpenSourceAssistantTable' +import { AddErrorToEvaluationRun1744964560174 } from './1744964560174-AddErrorToEvaluationRun' +import { AddTextToSpeechToChatFlow1754986486669 } from './1754986486669-AddTextToSpeechToChatFlow' +import { ModifyChatflowType1755066758601 } from './1755066758601-ModifyChatflowType' +import { AddTextToSpeechToChatFlow1759419136055 } from './1759419136055-AddTextToSpeechToChatFlow' +import { AddChatFlowNameIndex1759424923093 } from './1759424923093-AddChatFlowNameIndex' + +import { AddAuthTables1720230151482 } from '../../../enterprise/database/migrations/sqlite/1720230151482-AddAuthTables' +import { AddWorkspace1720230151484 } from '../../../enterprise/database/migrations/sqlite/1720230151484-AddWorkspace' +import { AddWorkspaceShared1726654922034 } from '../../../enterprise/database/migrations/sqlite/1726654922034-AddWorkspaceShared' +import { AddWorkspaceIdToCustomTemplate1726655750383 } from '../../../enterprise/database/migrations/sqlite/1726655750383-AddWorkspaceIdToCustomTemplate' +import { AddOrganization1727798417345 } from '../../../enterprise/database/migrations/sqlite/1727798417345-AddOrganization' +import { LinkWorkspaceId1729130948686 } from 
'../../../enterprise/database/migrations/sqlite/1729130948686-LinkWorkspaceId' +import { LinkOrganizationId1729133111652 } from '../../../enterprise/database/migrations/sqlite/1729133111652-LinkOrganizationId' +import { AddSSOColumns1730519457880 } from '../../../enterprise/database/migrations/sqlite/1730519457880-AddSSOColumns' +import { AddPersonalWorkspace1734074497540 } from '../../../enterprise/database/migrations/sqlite/1734074497540-AddPersonalWorkspace' +import { RefactorEnterpriseDatabase1737076223692 } from '../../../enterprise/database/migrations/sqlite/1737076223692-RefactorEnterpriseDatabase' +import { ExecutionLinkWorkspaceId1746862866554 } from '../../../enterprise/database/migrations/sqlite/1746862866554-ExecutionLinkWorkspaceId' export const sqliteMigrations = [ Init1693835579790, @@ -46,18 +68,39 @@ export const sqliteMigrations = [ AddFileUploadsToChatMessage1701788586491, AddSpeechToText1706364937060, AddUpsertHistoryEntity1709814301358, + AddEvaluation1714548873039, + AddDatasets1714548903384, + AddEvaluator1714808591644, AddFeedback1707213619308, AddDocumentStore1711637331047, AddLead1710832117612, AddLeadToChatMessage1711537986113, AddAgentReasoningToChatMessage1714679514451, - AddTypeToChatFlow1716300000000, AddVectorStoreConfigToDocStore1715861032479, + AddTypeToChatFlow1716300000000, AddApiKey1720230151480, AddActionToChatMessage1721078251523, AddArtifactsToChatMessage1726156258465, - AddCustomTemplate1725629836652, AddFollowUpPrompts1726666294213, AddTypeToAssistant1733011290987, - AddExecutionEntity1738090872625 + AddCustomTemplate1725629836652, + AddAuthTables1720230151482, + AddWorkspace1720230151484, + AddWorkspaceShared1726654922034, + AddWorkspaceIdToCustomTemplate1726655750383, + AddOrganization1727798417345, + LinkWorkspaceId1729130948686, + LinkOrganizationId1729133111652, + AddSSOColumns1730519457880, + AddSeqNoToDatasetRow1733752119696, + AddPersonalWorkspace1734074497540, + RefactorEnterpriseDatabase1737076223692, + 
AddExecutionEntity1738090872625, + FixOpenSourceAssistantTable1743758056188, + AddErrorToEvaluationRun1744964560174, + ExecutionLinkWorkspaceId1746862866554, + AddTextToSpeechToChatFlow1754986486669, + ModifyChatflowType1755066758601, + AddTextToSpeechToChatFlow1759419136055, + AddChatFlowNameIndex1759424923093 ] diff --git a/packages/server/src/enterprise/Interface.Enterprise.ts b/packages/server/src/enterprise/Interface.Enterprise.ts new file mode 100644 index 000000000..5dd4384e0 --- /dev/null +++ b/packages/server/src/enterprise/Interface.Enterprise.ts @@ -0,0 +1,135 @@ +import { z } from 'zod' + +export enum UserStatus { + INVITED = 'invited', + DISABLED = 'disabled', + ACTIVE = 'active' +} + +export class IUser { + id: string + email: string + name: string + credential: string + status: UserStatus + tempToken: string + tokenExpiry?: Date + role: string + lastLogin: Date + activeWorkspaceId: string + loginMode?: string + activeOrganizationId?: string +} + +export interface IWorkspaceUser { + id: string + workspaceId: string + userId: string + role: string +} + +export interface IWorkspaceShared { + id: string + workspaceId: string + sharedItemId: string + itemType: string + createdDate: Date + updatedDate: Date +} + +export interface ILoginActivity { + id: string + username: string + activityCode: number + message: string + loginMode: string + attemptedDateTime: Date +} + +export enum LoginActivityCode { + LOGIN_SUCCESS = 0, + LOGOUT_SUCCESS = 1, + UNKNOWN_USER = -1, + INCORRECT_CREDENTIAL = -2, + USER_DISABLED = -3, + NO_ASSIGNED_WORKSPACE = -4, + INVALID_LOGIN_MODE = -5, + REGISTRATION_PENDING = -6, + UNKNOWN_ERROR = -99 +} + +export type IAssignedWorkspace = { id: string; name: string; role: string; organizationId: string } +export type LoggedInUser = { + id: string + email: string + name: string + roleId: string + activeOrganizationId: string + activeOrganizationSubscriptionId: string + activeOrganizationCustomerId: string + activeOrganizationProductId: 
string + isOrganizationAdmin: boolean + activeWorkspaceId: string + activeWorkspace: string + assignedWorkspaces: IAssignedWorkspace[] + permissions?: string[] + features?: Record + ssoRefreshToken?: string + ssoToken?: string + ssoProvider?: string +} + +export enum ErrorMessage { + INVALID_MISSING_TOKEN = 'Invalid or Missing token', + TOKEN_EXPIRED = 'Token Expired', + REFRESH_TOKEN_EXPIRED = 'Refresh Token Expired', + FORBIDDEN = 'Forbidden', + UNKNOWN_USER = 'Unknown Username or Password', + INCORRECT_PASSWORD = 'Incorrect Password', + INACTIVE_USER = 'Inactive User', + INVITED_USER = 'User Invited, but has not registered', + INVALID_WORKSPACE = 'No Workspace Assigned', + UNKNOWN_ERROR = 'Unknown Error' +} + +// IMPORTANT: update the schema on the client side as well +// packages/ui/src/views/organization/index.jsx +export const OrgSetupSchema = z + .object({ + orgName: z.string().min(1, 'Organization name is required'), + username: z.string().min(1, 'Name is required'), + email: z.string().min(1, 'Email is required').email('Invalid email address'), + password: z + .string() + .min(8, 'Password must be at least 8 characters') + .regex(/[a-z]/, 'Password must contain at least one lowercase letter') + .regex(/[A-Z]/, 'Password must contain at least one uppercase letter') + .regex(/\d/, 'Password must contain at least one digit') + .regex(/[^a-zA-Z0-9]/, 'Password must contain at least one special character'), + confirmPassword: z.string().min(1, 'Confirm Password is required') + }) + .refine((data) => data.password === data.confirmPassword, { + message: "Passwords don't match", + path: ['confirmPassword'] + }) + +// IMPORTANT: when updating this schema, update the schema on the client side as well +// packages/ui/src/views/auth/register.jsx +export const RegisterUserSchema = z + .object({ + username: z.string().min(1, 'Name is required'), + email: z.string().min(1, 'Email is required').email('Invalid email address'), + password: z + .string() + .min(8, 'Password must
be at least 8 characters') + .regex(/[a-z]/, 'Password must contain at least one lowercase letter') + .regex(/[A-Z]/, 'Password must contain at least one uppercase letter') + .regex(/\d/, 'Password must contain at least one digit') + .regex(/[^a-zA-Z0-9]/, 'Password must contain at least one special character'), + confirmPassword: z.string().min(1, 'Confirm Password is required'), + token: z.string().min(1, 'Invite Code is required') + }) + .refine((data) => data.password === data.confirmPassword, { + message: "Passwords don't match", + path: ['confirmPassword'] + }) diff --git a/packages/server/src/enterprise/LICENSE.md b/packages/server/src/enterprise/LICENSE.md new file mode 100644 index 000000000..462c48dc6 --- /dev/null +++ b/packages/server/src/enterprise/LICENSE.md @@ -0,0 +1,46 @@ +The FlowiseAI Inc Commercial License (the "Commercial License") +Copyright (c) 2023-present FlowiseAI, Inc + +With regard to the FlowiseAI Inc Software: + +This software and associated documentation files (the "Software") may only be +used in production, if you (and any entity that you represent) have agreed to, +and are in compliance with, the FlowiseAI Inc Subscription Terms available +at https://flowiseai.com/terms, or other agreements governing +the use of the Software, as mutually agreed by you and FlowiseAI Inc, Inc ("FlowiseAI"), +and otherwise have a valid FlowiseAI Inc Enterprise Edition subscription ("Commercial Subscription") +for the correct number of hosts as defined in the "Commercial Terms ("Hosts"). Subject to the foregoing sentence, +you are free to modify this Software and publish patches to the Software. You agree +that FlowiseAI Inc and/or its licensors (as applicable) retain all right, title and interest in +and to all such modifications and/or patches, and all such modifications and/or +patches may only be used, copied, modified, displayed, distributed, or otherwise +exploited with a valid Commercial Subscription for the correct number of hosts. 
+Notwithstanding the foregoing, you may copy and modify the Software for development +and testing purposes, without requiring a subscription. You agree that FlowiseAI Inc and/or +its licensors (as applicable) retain all right, title and interest in and to all such +modifications. You are not granted any other rights beyond what is expressly stated herein. +Subject to the foregoing, it is forbidden to copy, merge, publish, distribute, sublicense, +and/or sell the Software. + +This Commercial License applies only to the part of this Software that is not distributed under +the Apache 2.0 license. The Open Source version of Flowise is licensed under the Apache License, Version 2.0. +Unauthorized copying, modification, distribution, or use of the Enterprise and Cloud versions +is strictly prohibited without a valid license agreement from FlowiseAI, Inc. + +For information about licensing of the Enterprise and Cloud versions, please contact: +security@flowiseai.com + +The full text of this Commercial License shall +be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +For all third party components incorporated into the FlowiseAI Inc Software, those +components are licensed under the original license provided by the owner of the +applicable component. 
diff --git a/packages/server/src/enterprise/controllers/account.controller.ts b/packages/server/src/enterprise/controllers/account.controller.ts new file mode 100644 index 000000000..9c360a6a9 --- /dev/null +++ b/packages/server/src/enterprise/controllers/account.controller.ts @@ -0,0 +1,136 @@ +import { Request, Response, NextFunction } from 'express' +import { StatusCodes } from 'http-status-codes' +import { AccountService } from '../services/account.service' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' + +export class AccountController { + public async register(req: Request, res: Response, next: NextFunction) { + try { + const accountService = new AccountService() + const data = await accountService.register(req.body) + return res.status(StatusCodes.CREATED).json(data) + } catch (error) { + next(error) + } + } + + public async invite(req: Request, res: Response, next: NextFunction) { + try { + const accountService = new AccountService() + const data = await accountService.invite(req.body, req.user) + return res.status(StatusCodes.CREATED).json(data) + } catch (error) { + next(error) + } + } + + public async login(req: Request, res: Response, next: NextFunction) { + try { + const accountService = new AccountService() + const data = await accountService.login(req.body) + return res.status(StatusCodes.CREATED).json(data) + } catch (error) { + next(error) + } + } + + public async verify(req: Request, res: Response, next: NextFunction) { + try { + const accountService = new AccountService() + const data = await accountService.verify(req.body) + return res.status(StatusCodes.CREATED).json(data) + } catch (error) { + next(error) + } + } + + public async resendVerificationEmail(req: Request, res: Response, next: NextFunction) { + try { + const accountService = new AccountService() + const data = await accountService.resendVerificationEmail(req.body) + return res.status(StatusCodes.CREATED).json(data) + } catch (error) { + next(error) + } + } 
+ + public async forgotPassword(req: Request, res: Response, next: NextFunction) { + try { + const accountService = new AccountService() + const data = await accountService.forgotPassword(req.body) + return res.status(StatusCodes.CREATED).json(data) + } catch (error) { + next(error) + } + } + + public async resetPassword(req: Request, res: Response, next: NextFunction) { + try { + const accountService = new AccountService() + const data = await accountService.resetPassword(req.body) + return res.status(StatusCodes.CREATED).json(data) + } catch (error) { + next(error) + } + } + + public async createStripeCustomerPortalSession(req: Request, res: Response, next: NextFunction) { + try { + const { url: portalSessionUrl } = await getRunningExpressApp().identityManager.createStripeCustomerPortalSession(req) + return res.status(StatusCodes.OK).json({ url: portalSessionUrl }) + } catch (error) { + next(error) + } + } + + public async logout(req: Request, res: Response, next: NextFunction) { + try { + if (req.user) { + const accountService = new AccountService() + await accountService.logout(req.user) + if (req.isAuthenticated()) { + req.logout((err) => { + if (err) { + return res.status(500).json({ message: 'Logout failed' }) + } + req.session.destroy((err) => { + if (err) { + return res.status(500).json({ message: 'Failed to destroy session' }) + } + }) + }) + } else { + // For JWT-based users (owner, org_admin) + res.clearCookie('connect.sid') // Clear the session cookie + res.clearCookie('token') // Clear the JWT cookie + res.clearCookie('refreshToken') // Clear the JWT cookie + return res.redirect('/login') // Redirect to the login page + } + } + return res.status(200).json({ message: 'logged_out', redirectTo: `/login` }) + } catch (error) { + next(error) + } + } + + public async getBasicAuth(req: Request, res: Response) { + if (process.env.FLOWISE_USERNAME && process.env.FLOWISE_PASSWORD) { + return res.status(StatusCodes.OK).json({ + isUsernamePasswordSet: true + }) + 
} else { + return res.status(StatusCodes.OK).json({ + isUsernamePasswordSet: false + }) + } + } + + public async checkBasicAuth(req: Request, res: Response) { + const { username, password } = req.body + if (username === process.env.FLOWISE_USERNAME && password === process.env.FLOWISE_PASSWORD) { + return res.json({ message: 'Authentication successful' }) + } else { + return res.json({ message: 'Authentication failed' }) + } + } +} diff --git a/packages/server/src/enterprise/controllers/audit/index.ts b/packages/server/src/enterprise/controllers/audit/index.ts new file mode 100644 index 000000000..6c95ca310 --- /dev/null +++ b/packages/server/src/enterprise/controllers/audit/index.ts @@ -0,0 +1,33 @@ +import { NextFunction, Request, Response } from 'express' +import auditService from '../../services/audit' +import { InternalFlowiseError } from '../../../errors/internalFlowiseError' +import { StatusCodes } from 'http-status-codes' + +const fetchLoginActivity = async (req: Request, res: Response, next: NextFunction) => { + try { + if (typeof req.body === 'undefined') { + throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: auditService.fetchLoginHistory - body not provided!`) + } + const apiResponse = await auditService.fetchLoginActivity(req.body) + return res.json(apiResponse) + } catch (error) { + next(error) + } +} + +const deleteLoginActivity = async (req: Request, res: Response, next: NextFunction) => { + try { + if (typeof req.body === 'undefined') { + throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: auditService.deleteLoginHistory - body not provided!`) + } + const apiResponse = await auditService.deleteLoginActivity(req.body) + return res.json(apiResponse) + } catch (error) { + next(error) + } +} + +export default { + fetchLoginActivity, + deleteLoginActivity +} diff --git a/packages/server/src/enterprise/controllers/auth/index.ts b/packages/server/src/enterprise/controllers/auth/index.ts new file mode 100644 index 
000000000..304b7eee6 --- /dev/null +++ b/packages/server/src/enterprise/controllers/auth/index.ts @@ -0,0 +1,28 @@ +import { NextFunction, Request, Response } from 'express' +import { getRunningExpressApp } from '../../../utils/getRunningExpressApp' + +const getAllPermissions = async (req: Request, res: Response, next: NextFunction) => { + try { + const appServer = getRunningExpressApp() + return res.json(appServer.identityManager.getPermissions()) + } catch (error) { + next(error) + } +} + +const ssoSuccess = async (req: Request, res: Response, next: NextFunction) => { + try { + const appServer = getRunningExpressApp() + const ssoToken = req.query.token as string + const user = await appServer.cachePool.getSSOTokenCache(ssoToken) + if (!user) return res.status(401).json({ message: 'Invalid or expired SSO token' }) + await appServer.cachePool.deleteSSOTokenCache(ssoToken) + return res.json(user) + } catch (error) { + next(error) + } +} +export default { + getAllPermissions, + ssoSuccess +} diff --git a/packages/server/src/enterprise/controllers/login-method.controller.ts b/packages/server/src/enterprise/controllers/login-method.controller.ts new file mode 100644 index 000000000..130e89580 --- /dev/null +++ b/packages/server/src/enterprise/controllers/login-method.controller.ts @@ -0,0 +1,145 @@ +import { NextFunction, Request, Response } from 'express' +import { StatusCodes } from 'http-status-codes' +import { InternalFlowiseError } from '../../errors/internalFlowiseError' +import { Platform } from '../../Interface' +import { GeneralErrorMessage } from '../../utils/constants' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' +import { LoginMethod, LoginMethodStatus } from '../database/entities/login-method.entity' +import { LoginMethodErrorMessage, LoginMethodService } from '../services/login-method.service' +import { OrganizationService } from '../services/organization.service' +import Auth0SSO from '../sso/Auth0SSO' +import AzureSSO from 
'../sso/AzureSSO' +import GithubSSO from '../sso/GithubSSO' +import GoogleSSO from '../sso/GoogleSSO' +import { decrypt } from '../utils/encryption.util' + +export class LoginMethodController { + public async create(req: Request, res: Response, next: NextFunction) { + try { + const loginMethodService = new LoginMethodService() + const loginMethod = await loginMethodService.createLoginMethod(req.body) + return res.status(StatusCodes.CREATED).json(loginMethod) + } catch (error) { + next(error) + } + } + + public async defaultMethods(req: Request, res: Response, next: NextFunction) { + let queryRunner + try { + queryRunner = getRunningExpressApp().AppDataSource.createQueryRunner() + await queryRunner.connect() + let organizationId + if (getRunningExpressApp().identityManager.getPlatformType() === Platform.CLOUD) { + organizationId = undefined + } else if (getRunningExpressApp().identityManager.getPlatformType() === Platform.ENTERPRISE) { + const organizationService = new OrganizationService() + const organizations = await organizationService.readOrganization(queryRunner) + if (organizations.length > 0) { + organizationId = organizations[0].id + } else { + return res.status(StatusCodes.OK).json({}) + } + } else { + return res.status(StatusCodes.OK).json({}) + } + const loginMethodService = new LoginMethodService() + + const providers: string[] = [] + + let loginMethod = await loginMethodService.readLoginMethodByOrganizationId(organizationId, queryRunner) + if (loginMethod) { + for (let method of loginMethod) { + if (method.status === LoginMethodStatus.ENABLE) providers.push(method.name) + } + } + return res.status(StatusCodes.OK).json({ providers: providers }) + } catch (error) { + next(error) + } finally { + if (queryRunner) await queryRunner.release() + } + } + + public async read(req: Request, res: Response, next: NextFunction) { + let queryRunner + try { + queryRunner = getRunningExpressApp().AppDataSource.createQueryRunner() + await queryRunner.connect() + const 
query = req.query as Partial + const loginMethodService = new LoginMethodService() + + const loginMethodConfig = { + providers: [], + callbacks: [ + { providerName: 'azure', callbackURL: AzureSSO.getCallbackURL() }, + { providerName: 'google', callbackURL: GoogleSSO.getCallbackURL() }, + { providerName: 'auth0', callbackURL: Auth0SSO.getCallbackURL() }, + { providerName: 'github', callbackURL: GithubSSO.getCallbackURL() } + ] + } + let loginMethod: any + if (query.id) { + loginMethod = await loginMethodService.readLoginMethodById(query.id, queryRunner) + if (!loginMethod) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, LoginMethodErrorMessage.LOGIN_METHOD_NOT_FOUND) + loginMethod.config = JSON.parse(await decrypt(loginMethod.config)) + } else if (query.organizationId) { + loginMethod = await loginMethodService.readLoginMethodByOrganizationId(query.organizationId, queryRunner) + + for (let method of loginMethod) { + method.config = JSON.parse(await decrypt(method.config)) + } + loginMethodConfig.providers = loginMethod + } else { + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, GeneralErrorMessage.UNHANDLED_EDGE_CASE) + } + return res.status(StatusCodes.OK).json(loginMethodConfig) + } catch (error) { + next(error) + } finally { + if (queryRunner) await queryRunner.release() + } + } + public async update(req: Request, res: Response, next: NextFunction) { + try { + const loginMethodService = new LoginMethodService() + const loginMethod = await loginMethodService.createOrUpdateConfig(req.body) + if (loginMethod?.status === 'OK' && loginMethod?.organizationId) { + const appServer = getRunningExpressApp() + let providers: any[] = req.body.providers + providers.map((provider: any) => { + const identityManager = appServer.identityManager + if (provider.config.clientID) { + provider.config.configEnabled = provider.status === LoginMethodStatus.ENABLE + identityManager.initializeSsoProvider(appServer.app, provider.providerName, provider.config) + } + }) + } + 
return res.status(StatusCodes.OK).json(loginMethod) + } catch (error) { + next(error) + } + } + public async testConfig(req: Request, res: Response, next: NextFunction) { + try { + const providers = req.body.providers + if (req.body.providerName === 'azure') { + const response = await AzureSSO.testSetup(providers[0].config) + return res.json(response) + } else if (req.body.providerName === 'google') { + const response = await GoogleSSO.testSetup(providers[0].config) + return res.json(response) + } else if (req.body.providerName === 'auth0') { + const response = await Auth0SSO.testSetup(providers[0].config) + return res.json(response) + } else if (req.body.providerName === 'github') { + const response = await GithubSSO.testSetup(providers[0].config) + return res.json(response) + } else { + return res.json({ error: 'Provider not supported' }) + } + } catch (error) { + next(error) + } + } +} diff --git a/packages/server/src/enterprise/controllers/organization-user.controller.ts b/packages/server/src/enterprise/controllers/organization-user.controller.ts new file mode 100644 index 000000000..466c6ee1f --- /dev/null +++ b/packages/server/src/enterprise/controllers/organization-user.controller.ts @@ -0,0 +1,146 @@ +import { NextFunction, Request, Response } from 'express' +import { StatusCodes } from 'http-status-codes' +import { InternalFlowiseError } from '../../errors/internalFlowiseError' +import { GeneralErrorMessage } from '../../utils/constants' +import { checkUsageLimit } from '../../utils/quotaUsage' +import { OrganizationUser } from '../database/entities/organization-user.entity' +import { Organization } from '../database/entities/organization.entity' + +type OrganizationUserQuery = Partial> + +import { QueryRunner } from 'typeorm' +import { Platform } from '../../Interface' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' +import { GeneralRole } from '../database/entities/role.entity' +import { User, UserStatus } from 
'../database/entities/user.entity' +import { WorkspaceUser } from '../database/entities/workspace-user.entity' +import { OrganizationUserService } from '../services/organization-user.service' +import { RoleService } from '../services/role.service' +import { WorkspaceService } from '../services/workspace.service' + +export class OrganizationUserController { + public async create(req: Request, res: Response, next: NextFunction) { + try { + const organizationUserservice = new OrganizationUserService() + const totalOrgUsers = await organizationUserservice.readOrgUsersCountByOrgId(req.body.organizationId) + const subscriptionId = req.user?.activeOrganizationSubscriptionId || '' + await checkUsageLimit('users', subscriptionId, getRunningExpressApp().usageCacheManager, totalOrgUsers + 1) + const newOrganizationUser = await organizationUserservice.createOrganizationUser(req.body) + return res.status(StatusCodes.CREATED).json(newOrganizationUser) + } catch (error) { + next(error) + } + } + + public async read(req: Request, res: Response, next: NextFunction) { + let queryRunner + try { + queryRunner = getRunningExpressApp().AppDataSource.createQueryRunner() + await queryRunner.connect() + const query = req.query as OrganizationUserQuery + const organizationUserservice = new OrganizationUserService() + + let organizationUser: + | { + organization: Organization + organizationUser: OrganizationUser | null + } + | OrganizationUser + | null + | OrganizationUser[] + | (OrganizationUser & { + roleCount: number + })[] + if (query.organizationId && query.userId) { + organizationUser = await organizationUserservice.readOrganizationUserByOrganizationIdUserId( + query.organizationId, + query.userId, + queryRunner + ) + } else if (query.organizationId && query.roleId) { + organizationUser = await organizationUserservice.readOrganizationUserByOrganizationIdRoleId( + query.organizationId, + query.roleId, + queryRunner + ) + } else if (query.organizationId) { + organizationUser = await 
organizationUserservice.readOrganizationUserByOrganizationId(query.organizationId, queryRunner) + } else if (query.userId) { + organizationUser = await organizationUserservice.readOrganizationUserByUserId(query.userId, queryRunner) + } else { + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, GeneralErrorMessage.UNHANDLED_EDGE_CASE) + } + + return res.status(StatusCodes.OK).json(organizationUser) + } catch (error) { + next(error) + } finally { + if (queryRunner) await queryRunner.release() + } + } + + public async update(req: Request, res: Response, next: NextFunction) { + try { + const organizationUserService = new OrganizationUserService() + const organizationUser = await organizationUserService.updateOrganizationUser(req.body) + return res.status(StatusCodes.OK).json(organizationUser) + } catch (error) { + next(error) + } + } + + public async delete(req: Request, res: Response, next: NextFunction) { + let queryRunner: QueryRunner | undefined + try { + queryRunner = getRunningExpressApp().AppDataSource.createQueryRunner() + const currentPlatform = getRunningExpressApp().identityManager.getPlatformType() + await queryRunner.connect() + const query = req.query as Partial + if (!query.organizationId) { + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, 'Organization ID is required') + } + if (!query.userId) { + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, 'User ID is required') + } + + const organizationUserService = new OrganizationUserService() + const workspaceService = new WorkspaceService() + const roleService = new RoleService() + + let organizationUser: OrganizationUser + await queryRunner.startTransaction() + if (currentPlatform === Platform.ENTERPRISE) { + const personalRole = await roleService.readGeneralRoleByName(GeneralRole.PERSONAL_WORKSPACE, queryRunner) + const personalWorkspaces = await queryRunner.manager.findBy(WorkspaceUser, { + userId: query.userId, + roleId: personalRole.id + }) + if (personalWorkspaces.length === 1) + 
// delete personal workspace + await workspaceService.deleteWorkspaceById(queryRunner, personalWorkspaces[0].workspaceId) + // remove user from other workspaces + organizationUser = await organizationUserService.deleteOrganizationUser(queryRunner, query.organizationId, query.userId) + // soft delete user because workspaces might have been created by them + const deleteUser = await queryRunner.manager.findOneBy(User, { id: query.userId }) + if (!deleteUser) throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, GeneralErrorMessage.UNHANDLED_EDGE_CASE) + deleteUser.name = UserStatus.DELETED + deleteUser.email = `deleted_${deleteUser.id}_${Date.now()}@deleted.flowise` + deleteUser.status = UserStatus.DELETED + deleteUser.credential = null + deleteUser.tokenExpiry = null + deleteUser.tempToken = null + await queryRunner.manager.save(User, deleteUser) + } else { + organizationUser = await organizationUserService.deleteOrganizationUser(queryRunner, query.organizationId, query.userId) + } + + await queryRunner.commitTransaction() + return res.status(StatusCodes.OK).json(organizationUser) + } catch (error) { + if (queryRunner && queryRunner.isTransactionActive) await queryRunner.rollbackTransaction() + next(error) + } finally { + if (queryRunner && !queryRunner.isReleased) await queryRunner.release() + } + } +} diff --git a/packages/server/src/enterprise/controllers/organization.controller.ts b/packages/server/src/enterprise/controllers/organization.controller.ts new file mode 100644 index 000000000..b7ca0a6d7 --- /dev/null +++ b/packages/server/src/enterprise/controllers/organization.controller.ts @@ -0,0 +1,187 @@ +import { Request, Response, NextFunction } from 'express' +import { StatusCodes } from 'http-status-codes' +import { OrganizationErrorMessage, OrganizationService } from '../services/organization.service' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' +import { InternalFlowiseError } from '../../errors/internalFlowiseError' 
+import { Organization } from '../database/entities/organization.entity' +import { GeneralErrorMessage } from '../../utils/constants' +import { OrganizationUserService } from '../services/organization-user.service' +import { getCurrentUsage } from '../../utils/quotaUsage' + +export class OrganizationController { + public async create(req: Request, res: Response, next: NextFunction) { + try { + const organizationUserService = new OrganizationUserService() + const newOrganization = await organizationUserService.createOrganization(req.body) + return res.status(StatusCodes.CREATED).json(newOrganization) + } catch (error) { + next(error) + } + } + + public async read(req: Request, res: Response, next: NextFunction) { + let queryRunner + try { + queryRunner = getRunningExpressApp().AppDataSource.createQueryRunner() + await queryRunner.connect() + const query = req.query as Partial + const organizationService = new OrganizationService() + + let organization: Organization | null + if (query.id) { + organization = await organizationService.readOrganizationById(query.id, queryRunner) + if (!organization) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, OrganizationErrorMessage.ORGANIZATION_NOT_FOUND) + } else if (query.name) { + organization = await organizationService.readOrganizationByName(query.name, queryRunner) + if (!organization) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, OrganizationErrorMessage.ORGANIZATION_NOT_FOUND) + } else { + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, GeneralErrorMessage.UNHANDLED_EDGE_CASE) + } + + return res.status(StatusCodes.OK).json(organization) + } catch (error) { + next(error) + } finally { + if (queryRunner) await queryRunner.release() + } + } + + public async update(req: Request, res: Response, next: NextFunction) { + try { + const organizationService = new OrganizationService() + const organization = await organizationService.updateOrganization(req.body) + return 
res.status(StatusCodes.OK).json(organization) + } catch (error) { + next(error) + } + } + + public async getAdditionalSeatsQuantity(req: Request, res: Response, next: NextFunction) { + try { + const { subscriptionId } = req.query + if (!subscriptionId) { + return res.status(400).json({ error: 'Subscription ID is required' }) + } + const organizationUserservice = new OrganizationUserService() + const totalOrgUsers = await organizationUserservice.readOrgUsersCountByOrgId(req.user?.activeOrganizationId as string) + + const identityManager = getRunningExpressApp().identityManager + const result = await identityManager.getAdditionalSeatsQuantity(subscriptionId as string) + + return res.status(StatusCodes.OK).json({ ...result, totalOrgUsers }) + } catch (error) { + next(error) + } + } + + public async getCustomerWithDefaultSource(req: Request, res: Response, next: NextFunction) { + try { + const { customerId } = req.query + if (!customerId) { + return res.status(400).json({ error: 'Customer ID is required' }) + } + const identityManager = getRunningExpressApp().identityManager + const result = await identityManager.getCustomerWithDefaultSource(customerId as string) + + return res.status(StatusCodes.OK).json(result) + } catch (error) { + next(error) + } + } + + public async getAdditionalSeatsProration(req: Request, res: Response, next: NextFunction) { + try { + const { subscriptionId, quantity } = req.query + if (!subscriptionId) { + return res.status(400).json({ error: 'Customer ID is required' }) + } + if (quantity === undefined) { + return res.status(400).json({ error: 'Quantity is required' }) + } + const identityManager = getRunningExpressApp().identityManager + const result = await identityManager.getAdditionalSeatsProration(subscriptionId as string, parseInt(quantity as string)) + + return res.status(StatusCodes.OK).json(result) + } catch (error) { + next(error) + } + } + + public async getPlanProration(req: Request, res: Response, next: NextFunction) { + try { + 
const { subscriptionId, newPlanId } = req.query + if (!subscriptionId) { + return res.status(400).json({ error: 'Subscription ID is required' }) + } + if (!newPlanId) { + return res.status(400).json({ error: 'New plan ID is required' }) + } + const identityManager = getRunningExpressApp().identityManager + const result = await identityManager.getPlanProration(subscriptionId as string, newPlanId as string) + + return res.status(StatusCodes.OK).json(result) + } catch (error) { + next(error) + } + } + + public async updateAdditionalSeats(req: Request, res: Response, next: NextFunction) { + try { + const { subscriptionId, quantity, prorationDate } = req.body + if (!subscriptionId) { + return res.status(400).json({ error: 'Subscription ID is required' }) + } + if (quantity === undefined) { + return res.status(400).json({ error: 'Quantity is required' }) + } + if (!prorationDate) { + return res.status(400).json({ error: 'Proration date is required' }) + } + const identityManager = getRunningExpressApp().identityManager + const result = await identityManager.updateAdditionalSeats(subscriptionId, quantity, prorationDate) + + return res.status(StatusCodes.OK).json(result) + } catch (error) { + next(error) + } + } + + public async updateSubscriptionPlan(req: Request, res: Response, next: NextFunction) { + try { + const { subscriptionId, newPlanId, prorationDate } = req.body + if (!subscriptionId) { + return res.status(400).json({ error: 'Subscription ID is required' }) + } + if (!newPlanId) { + return res.status(400).json({ error: 'New plan ID is required' }) + } + if (!prorationDate) { + return res.status(400).json({ error: 'Proration date is required' }) + } + const identityManager = getRunningExpressApp().identityManager + const result = await identityManager.updateSubscriptionPlan(req, subscriptionId, newPlanId, prorationDate) + + return res.status(StatusCodes.OK).json(result) + } catch (error) { + next(error) + } + } + + public async getCurrentUsage(req: Request, res: 
Response, next: NextFunction) { + try { + const orgId = req.user?.activeOrganizationId + const subscriptionId = req.user?.activeOrganizationSubscriptionId + if (!orgId) { + return res.status(400).json({ error: 'Organization ID is required' }) + } + if (!subscriptionId) { + return res.status(400).json({ error: 'Subscription ID is required' }) + } + const usageCacheManager = getRunningExpressApp().usageCacheManager + const result = await getCurrentUsage(orgId, subscriptionId, usageCacheManager) + return res.status(StatusCodes.OK).json(result) + } catch (error) { + next(error) + } + } +} diff --git a/packages/server/src/enterprise/controllers/role.controller.ts b/packages/server/src/enterprise/controllers/role.controller.ts new file mode 100644 index 000000000..d12b8657a --- /dev/null +++ b/packages/server/src/enterprise/controllers/role.controller.ts @@ -0,0 +1,70 @@ +import { NextFunction, Request, Response } from 'express' +import { StatusCodes } from 'http-status-codes' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' +import { Role } from '../database/entities/role.entity' +import { RoleService } from '../services/role.service' +import { InternalFlowiseError } from '../../errors/internalFlowiseError' + +export class RoleController { + public async create(req: Request, res: Response, next: NextFunction) { + try { + const roleService = new RoleService() + const newRole = await roleService.createRole(req.body) + return res.status(StatusCodes.CREATED).json(newRole) + } catch (error) { + next(error) + } + } + + public async read(req: Request, res: Response, next: NextFunction) { + let queryRunner + try { + queryRunner = getRunningExpressApp().AppDataSource.createQueryRunner() + await queryRunner.connect() + const query = req.query as Partial + const roleService = new RoleService() + + let role: Role | Role[] | null | (Role & { userCount: number })[] + if (query.id) { + role = await roleService.readRoleById(query.id, queryRunner) + } else if 
(query.organizationId) { + role = await roleService.readRoleByOrganizationId(query.organizationId, queryRunner) + } else { + role = await roleService.readRoleByGeneral(queryRunner) + } + + return res.status(StatusCodes.OK).json(role) + } catch (error) { + next(error) + } finally { + if (queryRunner) await queryRunner.release() + } + } + + public async update(req: Request, res: Response, next: NextFunction) { + try { + const roleService = new RoleService() + const role = await roleService.updateRole(req.body) + return res.status(StatusCodes.OK).json(role) + } catch (error) { + next(error) + } + } + + public async delete(req: Request, res: Response, next: NextFunction) { + try { + const query = req.query as Partial + if (!query.id) { + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, 'Role ID is required') + } + if (!query.organizationId) { + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, 'Organization ID is required') + } + const roleService = new RoleService() + const role = await roleService.deleteRole(query.organizationId, query.id) + return res.status(StatusCodes.OK).json(role) + } catch (error) { + next(error) + } + } +} diff --git a/packages/server/src/enterprise/controllers/user.controller.ts b/packages/server/src/enterprise/controllers/user.controller.ts new file mode 100644 index 000000000..2acc458bb --- /dev/null +++ b/packages/server/src/enterprise/controllers/user.controller.ts @@ -0,0 +1,77 @@ +import { NextFunction, Request, Response } from 'express' +import { StatusCodes } from 'http-status-codes' +import { InternalFlowiseError } from '../../errors/internalFlowiseError' +import { GeneralErrorMessage } from '../../utils/constants' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' +import { User } from '../database/entities/user.entity' +import { UserErrorMessage, UserService } from '../services/user.service' + +export class UserController { + public async create(req: Request, res: Response, next: NextFunction) { + 
try { + const userService = new UserService() + const user = await userService.createUser(req.body) + return res.status(StatusCodes.CREATED).json(user) + } catch (error) { + next(error) + } + } + + public async read(req: Request, res: Response, next: NextFunction) { + let queryRunner + try { + queryRunner = getRunningExpressApp().AppDataSource.createQueryRunner() + await queryRunner.connect() + const query = req.query as Partial + const userService = new UserService() + + let user: User | null + if (query.id) { + user = await userService.readUserById(query.id, queryRunner) + if (!user) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + } else if (query.email) { + user = await userService.readUserByEmail(query.email, queryRunner) + if (!user) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + } else { + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, GeneralErrorMessage.UNHANDLED_EDGE_CASE) + } + + if (user) { + delete user.credential + delete user.tempToken + delete user.tokenExpiry + } + return res.status(StatusCodes.OK).json(user) + } catch (error) { + next(error) + } finally { + if (queryRunner) await queryRunner.release() + } + } + + public async update(req: Request, res: Response, next: NextFunction) { + try { + const userService = new UserService() + const currentUser = req.user + if (!currentUser) { + throw new InternalFlowiseError(StatusCodes.UNAUTHORIZED, UserErrorMessage.USER_NOT_FOUND) + } + const { id } = req.body + if (currentUser.id !== id) { + throw new InternalFlowiseError(StatusCodes.FORBIDDEN, UserErrorMessage.USER_NOT_FOUND) + } + const user = await userService.updateUser(req.body) + return res.status(StatusCodes.OK).json(user) + } catch (error) { + next(error) + } + } + + public async test(req: Request, res: Response, next: NextFunction) { + try { + return res.status(StatusCodes.OK).json({ message: 'Hello World' }) + } catch (error) { + next(error) + } + } +} 
diff --git a/packages/server/src/enterprise/controllers/workspace-user.controller.ts b/packages/server/src/enterprise/controllers/workspace-user.controller.ts new file mode 100644 index 000000000..beab8b1ff --- /dev/null +++ b/packages/server/src/enterprise/controllers/workspace-user.controller.ts @@ -0,0 +1,87 @@ +import { NextFunction, Request, Response } from 'express' +import { StatusCodes } from 'http-status-codes' +import { QueryRunner } from 'typeorm' +import { InternalFlowiseError } from '../../errors/internalFlowiseError' +import { GeneralErrorMessage } from '../../utils/constants' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' +import { WorkspaceUser } from '../database/entities/workspace-user.entity' +import { WorkspaceUserService } from '../services/workspace-user.service' + +export class WorkspaceUserController { + public async create(req: Request, res: Response, next: NextFunction) { + try { + const workspaceUserService = new WorkspaceUserService() + const newWorkspaceUser = await workspaceUserService.createWorkspaceUser(req.body) + return res.status(StatusCodes.CREATED).json(newWorkspaceUser) + } catch (error) { + next(error) + } + } + + public async read(req: Request, res: Response, next: NextFunction) { + let queryRunner + try { + queryRunner = getRunningExpressApp().AppDataSource.createQueryRunner() + await queryRunner.connect() + const query = req.query as Partial + const workspaceUserService = new WorkspaceUserService() + + let workspaceUser: any + if (query.workspaceId && query.userId) { + workspaceUser = await workspaceUserService.readWorkspaceUserByWorkspaceIdUserId( + query.workspaceId, + query.userId, + queryRunner + ) + } else if (query.workspaceId) { + workspaceUser = await workspaceUserService.readWorkspaceUserByWorkspaceId(query.workspaceId, queryRunner) + } else if (query.organizationId && query.userId) { + workspaceUser = await workspaceUserService.readWorkspaceUserByOrganizationIdUserId( + 
query.organizationId, + query.userId, + queryRunner + ) + } else if (query.userId) { + workspaceUser = await workspaceUserService.readWorkspaceUserByUserId(query.userId, queryRunner) + } else if (query.roleId) { + workspaceUser = await workspaceUserService.readWorkspaceUserByRoleId(query.roleId, queryRunner) + } else { + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, GeneralErrorMessage.UNHANDLED_EDGE_CASE) + } + + return res.status(StatusCodes.OK).json(workspaceUser) + } catch (error) { + next(error) + } finally { + if (queryRunner) await queryRunner.release() + } + } + + public async update(req: Request, res: Response, next: NextFunction) { + let queryRunner: QueryRunner | undefined + try { + queryRunner = getRunningExpressApp().AppDataSource.createQueryRunner() + await queryRunner.connect() + const workspaceUserService = new WorkspaceUserService() + const workspaceUser = await workspaceUserService.updateWorkspaceUser(req.body, queryRunner) + return res.status(StatusCodes.OK).json(workspaceUser) + } catch (error) { + if (queryRunner && queryRunner.isTransactionActive) await queryRunner.rollbackTransaction() + next(error) + } finally { + if (queryRunner && !queryRunner.isReleased) await queryRunner.release() + } + } + + public async delete(req: Request, res: Response, next: NextFunction) { + try { + const query = req.query as Partial + + const workspaceUserService = new WorkspaceUserService() + const workspaceUser = await workspaceUserService.deleteWorkspaceUser(query.workspaceId, query.userId) + return res.status(StatusCodes.OK).json(workspaceUser) + } catch (error) { + next(error) + } + } +} diff --git a/packages/server/src/enterprise/controllers/workspace.controller.ts b/packages/server/src/enterprise/controllers/workspace.controller.ts new file mode 100644 index 000000000..dc29f97a6 --- /dev/null +++ b/packages/server/src/enterprise/controllers/workspace.controller.ts @@ -0,0 +1,239 @@ +import { NextFunction, Request, Response } from 'express' +import 
{ StatusCodes } from 'http-status-codes' +import { QueryRunner } from 'typeorm' +import { InternalFlowiseError } from '../../errors/internalFlowiseError' +import { GeneralErrorMessage } from '../../utils/constants' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' +import { OrganizationUserStatus } from '../database/entities/organization-user.entity' +import { GeneralRole } from '../database/entities/role.entity' +import { WorkspaceUserStatus } from '../database/entities/workspace-user.entity' +import { Workspace } from '../database/entities/workspace.entity' +import { IAssignedWorkspace, LoggedInUser } from '../Interface.Enterprise' +import { OrganizationUserErrorMessage, OrganizationUserService } from '../services/organization-user.service' +import { OrganizationErrorMessage, OrganizationService } from '../services/organization.service' +import { RoleErrorMessage, RoleService } from '../services/role.service' +import { UserErrorMessage, UserService } from '../services/user.service' +import { WorkspaceUserErrorMessage, WorkspaceUserService } from '../services/workspace-user.service' +import { WorkspaceErrorMessage, WorkspaceService } from '../services/workspace.service' + +export class WorkspaceController { + public async create(req: Request, res: Response, next: NextFunction) { + try { + const workspaceUserService = new WorkspaceUserService() + const newWorkspace = await workspaceUserService.createWorkspace(req.body) + return res.status(StatusCodes.CREATED).json(newWorkspace) + } catch (error) { + next(error) + } + } + + public async read(req: Request, res: Response, next: NextFunction) { + let queryRunner + try { + queryRunner = getRunningExpressApp().AppDataSource.createQueryRunner() + await queryRunner.connect() + const query = req.query as Partial + const workspaceService = new WorkspaceService() + + let workspace: + | Workspace + | null + | (Workspace & { + userCount: number + })[] + if (query.id) { + workspace = await 
workspaceService.readWorkspaceById(query.id, queryRunner) + } else if (query.organizationId) { + workspace = await workspaceService.readWorkspaceByOrganizationId(query.organizationId, queryRunner) + } else { + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, GeneralErrorMessage.UNHANDLED_EDGE_CASE) + } + + return res.status(StatusCodes.OK).json(workspace) + } catch (error) { + next(error) + } finally { + if (queryRunner) await queryRunner.release() + } + } + + public async switchWorkspace(req: Request, res: Response, next: NextFunction) { + if (!req.user) { + return next(new InternalFlowiseError(StatusCodes.UNAUTHORIZED, `Unauthorized: User not found`)) + } + let queryRunner + try { + queryRunner = getRunningExpressApp().AppDataSource.createQueryRunner() + await queryRunner.connect() + const query = req.query as Partial + await queryRunner.startTransaction() + + const workspaceService = new WorkspaceService() + const workspace = await workspaceService.readWorkspaceById(query.id, queryRunner) + if (!workspace) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, WorkspaceErrorMessage.WORKSPACE_NOT_FOUND) + + const userService = new UserService() + const user = await userService.readUserById(req.user.id, queryRunner) + if (!user) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + + const workspaceUserService = new WorkspaceUserService() + const { workspaceUser } = await workspaceUserService.readWorkspaceUserByWorkspaceIdUserId(query.id, req.user.id, queryRunner) + if (!workspaceUser) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, WorkspaceUserErrorMessage.WORKSPACE_USER_NOT_FOUND) + workspaceUser.lastLogin = new Date().toISOString() + workspaceUser.status = WorkspaceUserStatus.ACTIVE + workspaceUser.updatedBy = user.id + await workspaceUserService.saveWorkspaceUser(workspaceUser, queryRunner) + + const organizationUserService = new OrganizationUserService() + const { organizationUser } = await 
organizationUserService.readOrganizationUserByWorkspaceIdUserId( + workspaceUser.workspaceId, + workspaceUser.userId, + queryRunner + ) + if (!organizationUser) + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, OrganizationUserErrorMessage.ORGANIZATION_USER_NOT_FOUND) + organizationUser.status = OrganizationUserStatus.ACTIVE + organizationUser.updatedBy = user.id + await organizationUserService.saveOrganizationUser(organizationUser, queryRunner) + + const roleService = new RoleService() + const ownerRole = await roleService.readGeneralRoleByName(GeneralRole.OWNER, queryRunner) + const role = await roleService.readRoleById(workspaceUser.roleId, queryRunner) + if (!role) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, RoleErrorMessage.ROLE_NOT_FOUND) + + const orgService = new OrganizationService() + const org = await orgService.readOrganizationById(organizationUser.organizationId, queryRunner) + if (!org) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, OrganizationErrorMessage.ORGANIZATION_NOT_FOUND) + const subscriptionId = org.subscriptionId as string + const customerId = org.customerId as string + const features = await getRunningExpressApp().identityManager.getFeaturesByPlan(subscriptionId) + const productId = await getRunningExpressApp().identityManager.getProductIdFromSubscription(subscriptionId) + + const workspaceUsers = await workspaceUserService.readWorkspaceUserByUserId(req.user.id, queryRunner) + const assignedWorkspaces: IAssignedWorkspace[] = workspaceUsers.map((workspaceUser) => { + return { + id: workspaceUser.workspace.id, + name: workspaceUser.workspace.name, + role: workspaceUser.role?.name, + organizationId: workspaceUser.workspace.organizationId + } as IAssignedWorkspace + }) + + const loggedInUser: LoggedInUser & { role: string; isSSO: boolean } = { + ...req.user, + activeOrganizationId: org.id, + activeOrganizationSubscriptionId: subscriptionId, + activeOrganizationCustomerId: customerId, + activeOrganizationProductId: 
productId, + isOrganizationAdmin: workspaceUser.roleId === ownerRole.id, + activeWorkspaceId: workspace.id, + activeWorkspace: workspace.name, + assignedWorkspaces, + isSSO: req.user.ssoProvider ? true : false, + permissions: [...JSON.parse(role.permissions)], + features, + role: role.name, + roleId: role.id + } + + // update the passport session + req.user = { + ...req.user, + ...loggedInUser + } + + // Update passport session + // @ts-ignore + req.session.passport.user = { + ...req.user, + ...loggedInUser + } + + req.session.save((err) => { + if (err) throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, GeneralErrorMessage.UNHANDLED_EDGE_CASE) + }) + + await queryRunner.commitTransaction() + return res.status(StatusCodes.OK).json(loggedInUser) + } catch (error) { + if (queryRunner && !queryRunner.isTransactionActive) { + await queryRunner.rollbackTransaction() + } + next(error) + } finally { + if (queryRunner && !queryRunner.isReleased) { + await queryRunner.release() + } + } + } + + public async update(req: Request, res: Response, next: NextFunction) { + try { + const workspaceService = new WorkspaceService() + const workspace = await workspaceService.updateWorkspace(req.body) + return res.status(StatusCodes.OK).json(workspace) + } catch (error) { + next(error) + } + } + + public async delete(req: Request, res: Response, next: NextFunction) { + let queryRunner: QueryRunner | undefined + try { + queryRunner = getRunningExpressApp().AppDataSource.createQueryRunner() + await queryRunner.connect() + const workspaceId = req.params.id + if (!workspaceId) { + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, WorkspaceErrorMessage.INVALID_WORKSPACE_ID) + } + const workspaceService = new WorkspaceService() + await queryRunner.startTransaction() + + const workspace = await workspaceService.deleteWorkspaceById(queryRunner, workspaceId) + + await queryRunner.commitTransaction() + return res.status(StatusCodes.OK).json(workspace) + } catch (error) { + if 
(queryRunner && queryRunner.isTransactionActive) await queryRunner.rollbackTransaction() + next(error) + } finally { + if (queryRunner && !queryRunner.isReleased) await queryRunner.release() + } + } + + public async getSharedWorkspacesForItem(req: Request, res: Response, next: NextFunction) { + try { + if (typeof req.params === 'undefined' || !req.params.id) { + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, WorkspaceErrorMessage.INVALID_WORKSPACE_ID) + } + const workspaceService = new WorkspaceService() + return res.json(await workspaceService.getSharedWorkspacesForItem(req.params.id)) + } catch (error) { + next(error) + } + } + + public async setSharedWorkspacesForItem(req: Request, res: Response, next: NextFunction) { + try { + if (!req.user) { + throw new InternalFlowiseError(StatusCodes.UNAUTHORIZED, `Unauthorized: User not found`) + } + if (typeof req.params === 'undefined' || !req.params.id) { + throw new InternalFlowiseError( + StatusCodes.UNAUTHORIZED, + `Error: workspaceController.setSharedWorkspacesForItem - id not provided!` + ) + } + if (!req.body) { + throw new InternalFlowiseError( + StatusCodes.PRECONDITION_FAILED, + `Error: workspaceController.setSharedWorkspacesForItem - body not provided!` + ) + } + const workspaceService = new WorkspaceService() + return res.json(await workspaceService.setSharedWorkspacesForItem(req.params.id, req.body)) + } catch (error) { + next(error) + } + } +} diff --git a/packages/server/src/enterprise/database/entities/EnterpriseEntities.ts b/packages/server/src/enterprise/database/entities/EnterpriseEntities.ts new file mode 100644 index 000000000..da111d98a --- /dev/null +++ b/packages/server/src/enterprise/database/entities/EnterpriseEntities.ts @@ -0,0 +1,62 @@ +import { Column, Entity, PrimaryGeneratedColumn, UpdateDateColumn } from 'typeorm' +import { ILoginActivity, IWorkspaceShared, IWorkspaceUser } from '../../Interface.Enterprise' + +@Entity('workspace_users') +export class WorkspaceUsers implements 
IWorkspaceUser { + @PrimaryGeneratedColumn('uuid') + id: string + + @Column({ type: 'text' }) + workspaceId: string + + @Column({ type: 'text' }) + userId: string + + @Column({ type: 'text' }) + role: string +} + +@Entity('workspace_shared') +export class WorkspaceShared implements IWorkspaceShared { + @PrimaryGeneratedColumn('uuid') + id: string + + @Column({ type: 'text' }) + workspaceId: string + + @Column({ type: 'text' }) + sharedItemId: string + + @Column({ type: 'text', name: 'itemType' }) + itemType: string + + @Column({ type: 'timestamp' }) + @UpdateDateColumn() + createdDate: Date + + @Column({ type: 'timestamp' }) + @UpdateDateColumn() + updatedDate: Date +} + +@Entity('login_activity') +export class LoginActivity implements ILoginActivity { + @PrimaryGeneratedColumn('uuid') + id: string + + @Column({ type: 'text' }) + username: string + + @Column({ name: 'activity_code' }) + activityCode: number + + @Column({ name: 'login_mode' }) + loginMode: string + + @Column({ type: 'text' }) + message: string + + @Column({ type: 'timestamp' }) + @UpdateDateColumn() + attemptedDateTime: Date +} diff --git a/packages/server/src/enterprise/database/entities/login-method.entity.ts b/packages/server/src/enterprise/database/entities/login-method.entity.ts new file mode 100644 index 000000000..b30a991b4 --- /dev/null +++ b/packages/server/src/enterprise/database/entities/login-method.entity.ts @@ -0,0 +1,47 @@ +import { Column, CreateDateColumn, Entity, JoinColumn, ManyToOne, PrimaryGeneratedColumn, UpdateDateColumn } from 'typeorm' +import { User } from './user.entity' +import { Organization } from './organization.entity' + +export enum LoginMethodStatus { + ENABLE = 'enable', + DISABLE = 'disable' +} + +@Entity({ name: 'login_method' }) +export class LoginMethod { + @PrimaryGeneratedColumn('uuid') + id: string + + @Column({ nullable: true }) + organizationId?: string + @ManyToOne(() => Organization, (organization) => organization.id) + @JoinColumn({ name: 
'organizationId' }) + organization?: Organization + + @Column({ type: 'varchar', length: 100 }) + name: string + + @Column({ type: 'text' }) + config: string + + @Column({ type: 'varchar', length: 20, default: LoginMethodStatus.ENABLE }) + status?: string + + @CreateDateColumn() + createdDate?: Date + + @UpdateDateColumn() + updatedDate?: Date + + @Column({ nullable: true }) + createdBy?: string + @ManyToOne(() => User, (user) => user.createdByLoginMethod) + @JoinColumn({ name: 'createdBy' }) + createdByUser?: User + + @Column({ nullable: true }) + updatedBy?: string + @ManyToOne(() => User, (user) => user.updatedByLoginMethod) + @JoinColumn({ name: 'updatedBy' }) + updatedByUser?: User +} diff --git a/packages/server/src/enterprise/database/entities/login-session.entity.ts b/packages/server/src/enterprise/database/entities/login-session.entity.ts new file mode 100644 index 000000000..94ffa7bf2 --- /dev/null +++ b/packages/server/src/enterprise/database/entities/login-session.entity.ts @@ -0,0 +1,13 @@ +import { Column, Entity, PrimaryColumn } from 'typeorm' + +@Entity({ name: 'login_sessions' }) +export class LoginSession { + @PrimaryColumn({ type: 'varchar' }) + sid: string + + @Column({ type: 'text' }) + sess: string + + @Column({ type: 'bigint', nullable: true }) + expire?: number +} diff --git a/packages/server/src/enterprise/database/entities/organization-user.entity.ts b/packages/server/src/enterprise/database/entities/organization-user.entity.ts new file mode 100644 index 000000000..540ba5c28 --- /dev/null +++ b/packages/server/src/enterprise/database/entities/organization-user.entity.ts @@ -0,0 +1,52 @@ +import { Column, CreateDateColumn, Entity, JoinColumn, ManyToOne, PrimaryColumn, UpdateDateColumn } from 'typeorm' +import { Organization } from './organization.entity' +import { Role } from './role.entity' +import { User } from './user.entity' + +export enum OrganizationUserStatus { + ACTIVE = 'active', + DISABLE = 'disable', + INVITED = 'invited' +} + 
+@Entity({ name: 'organization_user' }) +export class OrganizationUser { + @PrimaryColumn() + organizationId: string + @ManyToOne(() => Organization, (organization) => organization.id) + @JoinColumn({ name: 'organizationId' }) + organization: Organization + + @PrimaryColumn() + userId: string + @ManyToOne(() => User, (user) => user.id) + @JoinColumn({ name: 'userId' }) + user: User + + @Column({ type: 'uuid', nullable: false }) + roleId: string + @ManyToOne(() => Role, (role) => role.id) + @JoinColumn({ name: 'roleId' }) + role?: Role + + @Column({ type: 'varchar', length: 20, default: OrganizationUserStatus.ACTIVE }) + status?: string + + @CreateDateColumn() + createdDate?: Date + + @UpdateDateColumn() + updatedDate?: Date + + @Column({ nullable: false }) + createdBy?: string + @ManyToOne(() => User, (user) => user.createdOrganizationUser) + @JoinColumn({ name: 'createdBy' }) + createdByUser?: User + + @Column({ nullable: false }) + updatedBy?: string + @ManyToOne(() => User, (user) => user.updatedOrganizationUser) + @JoinColumn({ name: 'updatedBy' }) + updatedByUser?: User +} diff --git a/packages/server/src/enterprise/database/entities/organization.entity.ts b/packages/server/src/enterprise/database/entities/organization.entity.ts new file mode 100644 index 000000000..1f6ad47ca --- /dev/null +++ b/packages/server/src/enterprise/database/entities/organization.entity.ts @@ -0,0 +1,39 @@ +import { Column, CreateDateColumn, Entity, JoinColumn, ManyToOne, PrimaryGeneratedColumn, UpdateDateColumn } from 'typeorm' +import { User } from './user.entity' + +export enum OrganizationName { + DEFAULT_ORGANIZATION = 'Default Organization' +} + +@Entity() +export class Organization { + @PrimaryGeneratedColumn('uuid') + id: string + + @Column({ type: 'varchar', length: 100, default: OrganizationName.DEFAULT_ORGANIZATION }) + name: string + + @Column({ type: 'varchar', length: 100, nullable: true }) + customerId?: string + + @Column({ type: 'varchar', length: 100, nullable: true 
}) + subscriptionId?: string + + @CreateDateColumn() + createdDate?: Date + + @UpdateDateColumn() + updatedDate?: Date + + @Column({ nullable: false }) + createdBy?: string + @ManyToOne(() => User, (user) => user.createdOrganizations) + @JoinColumn({ name: 'createdBy' }) + createdByUser?: User + + @Column({ nullable: false }) + updatedBy?: string + @ManyToOne(() => User, (user) => user.updatedOrganizations) + @JoinColumn({ name: 'updatedBy' }) + updatedByUser?: User +} diff --git a/packages/server/src/enterprise/database/entities/role.entity.ts b/packages/server/src/enterprise/database/entities/role.entity.ts new file mode 100644 index 000000000..86364cb9e --- /dev/null +++ b/packages/server/src/enterprise/database/entities/role.entity.ts @@ -0,0 +1,48 @@ +import { Column, CreateDateColumn, Entity, JoinColumn, ManyToOne, PrimaryGeneratedColumn, UpdateDateColumn } from 'typeorm' +import { Organization } from './organization.entity' +import { User } from './user.entity' + +export enum GeneralRole { + OWNER = 'owner', + MEMBER = 'member', + PERSONAL_WORKSPACE = 'personal workspace' +} + +@Entity() +export class Role { + @PrimaryGeneratedColumn('uuid') + id: string + + @Column({ nullable: true }) + organizationId?: string + @ManyToOne(() => Organization, (organization) => organization.id) + @JoinColumn({ name: 'organizationId' }) + organization?: Organization + + @Column({ type: 'varchar', length: 100 }) + name: string + + @Column({ type: 'text', nullable: true }) + description?: string + + @Column({ type: 'text' }) + permissions: string + + @CreateDateColumn() + createdDate?: Date + + @UpdateDateColumn() + updatedDate?: Date + + @Column({ nullable: true }) + createdBy?: string + @ManyToOne(() => User, (user) => user.createdRoles) + @JoinColumn({ name: 'createdBy' }) + createdByUser?: User + + @Column({ nullable: true }) + updatedBy?: string + @ManyToOne(() => User, (user) => user.updatedRoles) + @JoinColumn({ name: 'updatedBy' }) + updatedByUser?: User +} diff --git 
a/packages/server/src/enterprise/database/entities/user.entity.ts b/packages/server/src/enterprise/database/entities/user.entity.ts new file mode 100644 index 000000000..3bb455aef --- /dev/null +++ b/packages/server/src/enterprise/database/entities/user.entity.ts @@ -0,0 +1,92 @@ +import { Column, CreateDateColumn, Entity, JoinColumn, ManyToOne, OneToMany, PrimaryGeneratedColumn, UpdateDateColumn } from 'typeorm' +import { LoginMethod } from './login-method.entity' +import { OrganizationUser } from './organization-user.entity' +import { Organization } from './organization.entity' +import { Role } from './role.entity' +import { WorkspaceUser } from './workspace-user.entity' +import { Workspace } from './workspace.entity' + +export enum UserStatus { + ACTIVE = 'active', + INVITED = 'invited', + UNVERIFIED = 'unverified', + DELETED = 'deleted' +} + +@Entity() +export class User { + @PrimaryGeneratedColumn('uuid') + id: string + + @Column({ type: 'varchar', length: 100 }) + name: string + + @Column({ type: 'varchar', length: 255, unique: true }) + email: string + + @Column({ type: 'text', nullable: true }) + credential?: string | null + + @Column({ type: 'text', nullable: true, unique: true }) + tempToken?: string | null + + @CreateDateColumn({ nullable: true }) + tokenExpiry?: Date | null + + @Column({ type: 'varchar', length: 20, default: UserStatus.UNVERIFIED }) + status: string + + @CreateDateColumn() + createdDate?: Date + + @UpdateDateColumn() + updatedDate?: Date + + @Column({ nullable: false }) + createdBy: string + @ManyToOne(() => User, (user) => user.id, {}) + @JoinColumn({ name: 'createdBy' }) + createdByUser?: User + + @Column({ nullable: false }) + updatedBy: string + @ManyToOne(() => User, (user) => user.id, {}) + @JoinColumn({ name: 'updatedBy' }) + updatedByUser?: User + + @OneToMany(() => Organization, (organization) => organization.createdByUser) + createdOrganizations?: Organization[] + + @OneToMany(() => Organization, (organization) => 
organization.updatedByUser) + updatedOrganizations?: Organization[] + + @OneToMany(() => Role, (role) => role.createdByUser) + createdRoles?: Role[] + + @OneToMany(() => Role, (role) => role.updatedByUser) + updatedRoles?: Role[] + + @OneToMany(() => OrganizationUser, (organizationUser) => organizationUser.createdByUser) + createdOrganizationUser?: OrganizationUser[] + + @OneToMany(() => OrganizationUser, (organizationUser) => organizationUser.updatedByUser) + updatedOrganizationUser?: OrganizationUser[] + + @OneToMany(() => Workspace, (workspace) => workspace.createdByUser) + createdWorkspace?: Workspace[] + + @OneToMany(() => Workspace, (workspace) => workspace.updatedByUser) + updatedWorkspace?: Workspace[] + + @OneToMany(() => WorkspaceUser, (workspaceUser) => workspaceUser.createdByUser) + createdWorkspaceUser?: WorkspaceUser[] + + @OneToMany(() => WorkspaceUser, (workspaceUser) => workspaceUser.updatedByUser) + updatedByWorkspaceUser?: WorkspaceUser[] + + @OneToMany(() => LoginMethod, (loginMethod) => loginMethod.createdByUser) + createdByLoginMethod?: LoginMethod[] + + @OneToMany(() => LoginMethod, (loginMethod) => loginMethod.updatedByUser) + updatedByLoginMethod?: LoginMethod[] +} diff --git a/packages/server/src/enterprise/database/entities/workspace-user.entity.ts b/packages/server/src/enterprise/database/entities/workspace-user.entity.ts new file mode 100644 index 000000000..0afb71a92 --- /dev/null +++ b/packages/server/src/enterprise/database/entities/workspace-user.entity.ts @@ -0,0 +1,55 @@ +import { Column, CreateDateColumn, Entity, JoinColumn, ManyToOne, PrimaryColumn, UpdateDateColumn } from 'typeorm' +import { User } from './user.entity' +import { Role } from './role.entity' +import { Workspace } from './workspace.entity' + +export enum WorkspaceUserStatus { + ACTIVE = 'active', + DISABLE = 'disable', + INVITED = 'invited' +} + +@Entity({ name: 'workspace_user' }) +export class WorkspaceUser { + @PrimaryColumn() + workspaceId: string + 
@ManyToOne(() => Workspace, (workspace) => workspace.id) + @JoinColumn({ name: 'workspaceId' }) + workspace: Workspace + + @PrimaryColumn() + userId: string + @ManyToOne(() => User, (user) => user.id) + @JoinColumn({ name: 'userId' }) + user: User + + @Column({ type: 'uuid', nullable: false }) + roleId: string + @ManyToOne(() => Role, (role) => role.id) + @JoinColumn({ name: 'roleId' }) + role?: Role + + @Column({ type: 'varchar', length: 20, default: WorkspaceUserStatus.INVITED }) + status?: string + + @Column({ type: 'timestamp', nullable: true }) + lastLogin?: Date + + @CreateDateColumn() + createdDate?: Date + + @UpdateDateColumn() + updatedDate?: Date + + @Column({ nullable: false }) + createdBy?: string + @ManyToOne(() => User, (user) => user.createdWorkspaceUser) + @JoinColumn({ name: 'createdBy' }) + createdByUser?: User + + @Column({ nullable: false }) + updatedBy?: string + @ManyToOne(() => User, (user) => user.updatedByWorkspaceUser) + @JoinColumn({ name: 'updatedBy' }) + updatedByUser?: User +} diff --git a/packages/server/src/enterprise/database/entities/workspace.entity.ts b/packages/server/src/enterprise/database/entities/workspace.entity.ts new file mode 100644 index 000000000..228d8efc4 --- /dev/null +++ b/packages/server/src/enterprise/database/entities/workspace.entity.ts @@ -0,0 +1,44 @@ +import { Column, CreateDateColumn, Entity, JoinColumn, ManyToOne, PrimaryGeneratedColumn, UpdateDateColumn } from 'typeorm' +import { Organization } from './organization.entity' +import { User } from './user.entity' + +export enum WorkspaceName { + DEFAULT_WORKSPACE = 'Default Workspace', + DEFAULT_PERSONAL_WORKSPACE = 'Personal Workspace' +} + +@Entity() +export class Workspace { + @PrimaryGeneratedColumn('uuid') + id: string + + @Column({ type: 'varchar', length: 100, default: WorkspaceName.DEFAULT_PERSONAL_WORKSPACE }) + name: string + + @Column({ type: 'text', nullable: true }) + description?: string + + @Column({ nullable: false }) + organizationId?: string + @ManyToOne(() => 
Organization, (organization) => organization.id) + @JoinColumn({ name: 'organizationId' }) + organization?: Organization + + @CreateDateColumn() + createdDate?: Date + + @UpdateDateColumn() + updatedDate?: Date + + @Column({ nullable: false }) + createdBy?: string + @ManyToOne(() => User, (user) => user.createdWorkspace) + @JoinColumn({ name: 'createdBy' }) + createdByUser?: User + + @Column({ nullable: false }) + updatedBy?: string + @ManyToOne(() => User, (user) => user.updatedWorkspace) + @JoinColumn({ name: 'updatedBy' }) + updatedByUser?: User +} diff --git a/packages/server/src/enterprise/database/migrations/mariadb/1720230151482-AddAuthTables.ts b/packages/server/src/enterprise/database/migrations/mariadb/1720230151482-AddAuthTables.ts new file mode 100644 index 000000000..8b3f2134d --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/mariadb/1720230151482-AddAuthTables.ts @@ -0,0 +1,46 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddAuthTables1720230151482 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS \`user\` ( + \`id\` varchar(36) NOT NULL, + \`name\` varchar(255), + \`role\` varchar(20) NOT NULL, + \`email\` varchar(100) NOT NULL, + \`status\` varchar(20) NOT NULL, + \`credential\` text, + \`tempToken\` text, + \`tokenExpiry\` datetime(6), + \`activeWorkspaceId\` varchar(100), + \`lastLogin\` datetime(6), + PRIMARY KEY (\`id\`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_520_ci;` + ) + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS \`roles\` ( + \`id\` varchar(36) NOT NULL, + \`name\` varchar(255), + \`description\` text, + \`permissions\` text, + PRIMARY KEY (\`id\`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_520_ci;` + ) + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS \`login_activity\` ( + \`id\` varchar(36) NOT NULL, + \`username\` 
varchar(255), + \`message\` varchar(255) NOT NULL, + \`activity_code\` INT NOT NULL, + \`attemptedDateTime\` datetime(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6) ON UPDATE CURRENT_TIMESTAMP(6), + PRIMARY KEY (\`id\`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_520_ci;` + ) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`DROP TABLE user`) + await queryRunner.query(`DROP TABLE roles`) + await queryRunner.query(`DROP TABLE login_activity`) + } +} diff --git a/packages/server/src/enterprise/database/migrations/mariadb/1725437498242-AddWorkspace.ts b/packages/server/src/enterprise/database/migrations/mariadb/1725437498242-AddWorkspace.ts new file mode 100644 index 000000000..0acb7733b --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/mariadb/1725437498242-AddWorkspace.ts @@ -0,0 +1,52 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' +import { ensureColumnExists } from './mariaDbCustomFunctions' + +export class AddWorkspace1725437498242 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS \`workspace\` ( + \`id\` varchar(36) NOT NULL, + \`name\` varchar(255) NOT NULL, + \`description\` text DEFAULT NULL, + \`createdDate\` datetime(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6), + \`updatedDate\` datetime(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6) ON UPDATE CURRENT_TIMESTAMP(6), + PRIMARY KEY (\`id\`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_520_ci;` + ) + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS \`workspace_users\` ( + \`id\` varchar(36) NOT NULL, + \`workspaceId\` varchar(36) NOT NULL, + \`userId\` varchar(36) NOT NULL, + \`role\` varchar(255) DEFAULT NULL, + PRIMARY KEY (\`id\`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_520_ci;` + ) + + await ensureColumnExists(queryRunner, 'chat_flow', 'workspaceId', 'varchar(36)') + await 
ensureColumnExists(queryRunner, 'tool', 'workspaceId', 'varchar(36)') + await ensureColumnExists(queryRunner, 'assistant', 'workspaceId', 'varchar(36)') + await ensureColumnExists(queryRunner, 'credential', 'workspaceId', 'varchar(36)') + await ensureColumnExists(queryRunner, 'document_store', 'workspaceId', 'varchar(36)') + await ensureColumnExists(queryRunner, 'evaluation', 'workspaceId', 'varchar(36)') + await ensureColumnExists(queryRunner, 'evaluator', 'workspaceId', 'varchar(36)') + await ensureColumnExists(queryRunner, 'dataset', 'workspaceId', 'varchar(36)') + await ensureColumnExists(queryRunner, 'apikey', 'workspaceId', 'varchar(36)') + await ensureColumnExists(queryRunner, 'variable', 'workspaceId', 'varchar(36)') + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`DROP TABLE workspace`) + await queryRunner.query(`DROP TABLE workspace_users`) + + await queryRunner.query(`ALTER TABLE \`chat_flow\` DROP COLUMN \`workspaceId\`;`) + await queryRunner.query(`ALTER TABLE \`tool\` DROP COLUMN \`workspaceId\`;`) + await queryRunner.query(`ALTER TABLE \`assistant\` DROP COLUMN \`workspaceId\`;`) + await queryRunner.query(`ALTER TABLE \`credential\` DROP COLUMN \`workspaceId\`;`) + await queryRunner.query(`ALTER TABLE \`document_store\` DROP COLUMN \`workspaceId\`;`) + await queryRunner.query(`ALTER TABLE \`evaluation\` DROP COLUMN \`workspaceId\`;`); await queryRunner.query(`ALTER TABLE \`evaluator\` DROP COLUMN \`workspaceId\`;`) + await queryRunner.query(`ALTER TABLE \`dataset\` DROP COLUMN \`workspaceId\`;`) + await queryRunner.query(`ALTER TABLE \`apikey\` DROP COLUMN \`workspaceId\`;`) + await queryRunner.query(`ALTER TABLE \`variable\` DROP COLUMN \`workspaceId\`;`) + } +} diff --git a/packages/server/src/enterprise/database/migrations/mariadb/1726654922034-AddWorkspaceShared.ts b/packages/server/src/enterprise/database/migrations/mariadb/1726654922034-AddWorkspaceShared.ts new file mode 100644 index 000000000..16375d79b --- /dev/null +++
b/packages/server/src/enterprise/database/migrations/mariadb/1726654922034-AddWorkspaceShared.ts @@ -0,0 +1,21 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddWorkspaceShared1726654922034 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS \`workspace_shared\` ( + \`id\` varchar(36) NOT NULL, + \`workspaceId\` varchar(50) NOT NULL, + \`sharedItemId\` varchar(50) NOT NULL, + \`itemType\` varchar(50) NOT NULL, + \`createdDate\` datetime(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6), + \`updatedDate\` datetime(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6) ON UPDATE CURRENT_TIMESTAMP(6), + PRIMARY KEY (\`id\`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_520_ci;` + ) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`DROP TABLE workspace_shared`) + } +} diff --git a/packages/server/src/enterprise/database/migrations/mariadb/1726655750383-AddWorkspaceIdToCustomTemplate.ts b/packages/server/src/enterprise/database/migrations/mariadb/1726655750383-AddWorkspaceIdToCustomTemplate.ts new file mode 100644 index 000000000..c6b6c2ab2 --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/mariadb/1726655750383-AddWorkspaceIdToCustomTemplate.ts @@ -0,0 +1,11 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddWorkspaceIdToCustomTemplate1726655750383 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE \`custom_template\` ADD COLUMN \`workspaceId\` varchar(36);`) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE \`custom_template\` DROP COLUMN \`workspaceId\`;`) + } +} diff --git a/packages/server/src/enterprise/database/migrations/mariadb/1727798417345-AddOrganization.ts 
b/packages/server/src/enterprise/database/migrations/mariadb/1727798417345-AddOrganization.ts new file mode 100644 index 000000000..7397ceb40 --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/mariadb/1727798417345-AddOrganization.ts @@ -0,0 +1,26 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddOrganization1727798417345 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS \`organization\` ( + \`id\` varchar(36) NOT NULL, + \`name\` varchar(255) NOT NULL, + \`adminUserId\` varchar(255) NULL, + \`defaultWsId\` varchar(255) NULL, + \`organization_type\` varchar(255) NULL, + \`createdDate\` timestamp(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6), + \`updatedDate\` timestamp(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6) ON UPDATE CURRENT_TIMESTAMP(6), + PRIMARY KEY (\`id\`), + KEY \`idx_organization_id\` (\`id\`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_520_ci;` + ) + await queryRunner.query(`ALTER TABLE \`workspace\` ADD COLUMN \`organizationId\` varchar(36);`) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`DROP TABLE IF EXISTS \`organization\`;`) + + await queryRunner.query(`ALTER TABLE \`workspace\` DROP COLUMN \`organizationId\`;`) + } +} diff --git a/packages/server/src/enterprise/database/migrations/mariadb/1729130948686-LinkWorkspaceId.ts b/packages/server/src/enterprise/database/migrations/mariadb/1729130948686-LinkWorkspaceId.ts new file mode 100644 index 000000000..b866afb96 --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/mariadb/1729130948686-LinkWorkspaceId.ts @@ -0,0 +1,233 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class LinkWorkspaceId1729130948686 implements MigrationInterface { + name = 'LinkWorkspaceId1729130948686' + + public async up(queryRunner: QueryRunner): Promise { + // step 1 - add index and 
foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`apikey\` + ADD INDEX \`idx_apikey_workspaceId\` (\`workspaceId\`), + ADD CONSTRAINT \`fk_apikey_workspaceId\` + FOREIGN KEY (\`workspaceId\`) + REFERENCES \`workspace\`(\`id\`); + `) + + // step 1 - add index and foreign key for activeWorkspaceId + await queryRunner.query(` + ALTER TABLE \`user\` + ADD INDEX \`idx_user_activeWorkspaceId\` (\`activeWorkspaceId\`), + ADD CONSTRAINT \`fk_user_activeWorkspaceId\` + FOREIGN KEY (\`activeWorkspaceId\`) + REFERENCES \`workspace\`(\`id\`); + `) + + // step 1 - add index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`workspace_users\` + ADD INDEX \`idx_workspace_users_workspaceId\` (\`workspaceId\`), + ADD CONSTRAINT \`fk_workspace_users_workspaceId\` + FOREIGN KEY (\`workspaceId\`) + REFERENCES \`workspace\`(\`id\`); + `) + + // step 1 - add index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`chat_flow\` + ADD INDEX \`idx_chat_flow_workspaceId\` (\`workspaceId\`), + ADD CONSTRAINT \`fk_chat_flow_workspaceId\` + FOREIGN KEY (\`workspaceId\`) + REFERENCES \`workspace\`(\`id\`); + `) + + // step 1 - add index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`tool\` + ADD INDEX \`idx_tool_workspaceId\` (\`workspaceId\`), + ADD CONSTRAINT \`fk_tool_workspaceId\` + FOREIGN KEY (\`workspaceId\`) + REFERENCES \`workspace\`(\`id\`); + `) + + // step 1 - add index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`assistant\` + ADD INDEX \`idx_assistant_workspaceId\` (\`workspaceId\`), + ADD CONSTRAINT \`fk_assistant_workspaceId\` + FOREIGN KEY (\`workspaceId\`) + REFERENCES \`workspace\`(\`id\`); + `) + + // step 1 - add index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`credential\` + ADD INDEX \`idx_credential_workspaceId\` (\`workspaceId\`), + ADD CONSTRAINT \`fk_credential_workspaceId\` + FOREIGN KEY (\`workspaceId\`) 
+ REFERENCES \`workspace\`(\`id\`); + `) + + // step 1 - add index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`document_store\` + ADD INDEX \`idx_document_store_workspaceId\` (\`workspaceId\`), + ADD CONSTRAINT \`fk_document_store_workspaceId\` + FOREIGN KEY (\`workspaceId\`) + REFERENCES \`workspace\`(\`id\`); + `) + + // step 1 - add index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`evaluation\` + ADD INDEX \`idx_evaluation_workspaceId\` (\`workspaceId\`), + ADD CONSTRAINT \`fk_evaluation_workspaceId\` + FOREIGN KEY (\`workspaceId\`) + REFERENCES \`workspace\`(\`id\`); + `) + + // step 1 - add index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`evaluator\` + ADD INDEX \`idx_evaluator_workspaceId\` (\`workspaceId\`), + ADD CONSTRAINT \`fk_evaluator_workspaceId\` + FOREIGN KEY (\`workspaceId\`) + REFERENCES \`workspace\`(\`id\`); + `) + + // step 1 - add index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`dataset\` + ADD INDEX \`idx_dataset_workspaceId\` (\`workspaceId\`), + ADD CONSTRAINT \`fk_dataset_workspaceId\` + FOREIGN KEY (\`workspaceId\`) + REFERENCES \`workspace\`(\`id\`); + `) + + // step 1 - add index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`variable\` + ADD INDEX \`idx_variable_workspaceId\` (\`workspaceId\`), + ADD CONSTRAINT \`fk_variable_workspaceId\` + FOREIGN KEY (\`workspaceId\`) + REFERENCES \`workspace\`(\`id\`); + `) + + // step 1 - add index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`workspace_shared\` + ADD INDEX \`idx_workspace_shared_workspaceId\` (\`workspaceId\`), + ADD CONSTRAINT \`fk_workspace_shared_workspaceId\` + FOREIGN KEY (\`workspaceId\`) + REFERENCES \`workspace\`(\`id\`); + `) + + // step 1 - add index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`custom_template\` + ADD INDEX 
\`idx_custom_template_workspaceId\` (\`workspaceId\`), + ADD CONSTRAINT \`fk_custom_template_workspaceId\` + FOREIGN KEY (\`workspaceId\`) + REFERENCES \`workspace\`(\`id\`); + `) + } + + public async down(queryRunner: QueryRunner): Promise { + // step 1 - drop index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`apikey\` + DROP INDEX \`idx_apikey_workspaceId\`, + DROP FOREIGN KEY \`fk_apikey_workspaceId\`; + `) + + // step 1 - drop index and foreign key for activeWorkspaceId + await queryRunner.query(` + ALTER TABLE \`user\` + DROP INDEX \`idx_user_activeWorkspaceId\`, + DROP FOREIGN KEY \`fk_user_activeWorkspaceId\`; + `) + + // step 1 - drop index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`workspace_users\` + DROP INDEX \`idx_workspace_users_workspaceId\`, + DROP FOREIGN KEY \`fk_workspace_users_workspaceId\`; + `) + + // step 1 - drop index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`chat_flow\` + DROP INDEX \`idx_chat_flow_workspaceId\`, + DROP FOREIGN KEY \`fk_chat_flow_workspaceId\`; + `) + + // step 1 - drop index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`tool\` + DROP INDEX \`idx_tool_workspaceId\`, + DROP FOREIGN KEY \`fk_tool_workspaceId\`; + `) + + // step 1 - drop index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`assistant\` + DROP INDEX \`idx_assistant_workspaceId\`, + DROP FOREIGN KEY \`fk_assistant_workspaceId\`; + `) + + // step 1 - drop index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`credential\` + DROP INDEX \`idx_credential_workspaceId\`, + DROP FOREIGN KEY \`fk_credential_workspaceId\`; + `) + + // step 1 - drop index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`document_store\` + DROP INDEX \`idx_document_store_workspaceId\`, + DROP FOREIGN KEY \`fk_document_store_workspaceId\`; + `) + + // step 1 - drop index and foreign 
key for workspaceId + await queryRunner.query(` + ALTER TABLE \`evaluation\` + DROP INDEX \`idx_evaluation_workspaceId\`, + DROP FOREIGN KEY \`fk_evaluation_workspaceId\`; + `) + + // step 1 - drop index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`evaluator\` + DROP INDEX \`idx_evaluator_workspaceId\`, + DROP FOREIGN KEY \`fk_evaluator_workspaceId\`; + `) + + // step 1 - drop index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`dataset\` + DROP INDEX \`idx_dataset_workspaceId\`, + DROP FOREIGN KEY \`fk_dataset_workspaceId\`; + `) + + // step 1 - drop index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`variable\` + DROP INDEX \`idx_variable_workspaceId\`, + DROP FOREIGN KEY \`fk_variable_workspaceId\`; + `) + + // step 1 - drop index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`workspace_shared\` + DROP INDEX \`idx_workspace_shared_workspaceId\`, + DROP FOREIGN KEY \`fk_workspace_shared_workspaceId\`; + `) + + // step 1 - drop index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`custom_template\` + DROP INDEX \`idx_custom_template_workspaceId\`, + DROP FOREIGN KEY \`fk_custom_template_workspaceId\`; + `) + } +} diff --git a/packages/server/src/enterprise/database/migrations/mariadb/1729133111652-LinkOrganizationId.ts b/packages/server/src/enterprise/database/migrations/mariadb/1729133111652-LinkOrganizationId.ts new file mode 100644 index 000000000..42ab342a7 --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/mariadb/1729133111652-LinkOrganizationId.ts @@ -0,0 +1,25 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class LinkOrganizationId1729133111652 implements MigrationInterface { + name = 'LinkOrganizationId1729133111652' + + public async up(queryRunner: QueryRunner): Promise { + // step 1 - add index and foreign key for organizationId + await queryRunner.query(` + ALTER 
TABLE \`workspace\` + ADD INDEX \`idx_workspace_organizationId\` (\`organizationId\`), + ADD CONSTRAINT \`fk_workspace_organizationId\` + FOREIGN KEY (\`organizationId\`) + REFERENCES \`organization\`(\`id\`); + `) + } + + public async down(queryRunner: QueryRunner): Promise { + // step 1 - drop index and foreign key for organizationId + await queryRunner.query(` + ALTER TABLE \`workspace\` + DROP INDEX \`idx_workspace_organizationId\`, + DROP FOREIGN KEY \`fk_workspace_organizationId\`; + `) + } +} diff --git a/packages/server/src/enterprise/database/migrations/mariadb/1730519457880-AddSSOColumns.ts b/packages/server/src/enterprise/database/migrations/mariadb/1730519457880-AddSSOColumns.ts new file mode 100644 index 000000000..5c00f4357 --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/mariadb/1730519457880-AddSSOColumns.ts @@ -0,0 +1,16 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' +import { ensureColumnExists } from './mariaDbCustomFunctions' + +export class AddSSOColumns1730519457880 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await ensureColumnExists(queryRunner, 'organization', 'sso_config', 'text') + await ensureColumnExists(queryRunner, 'user', 'user_type', 'varchar(10)') + await ensureColumnExists(queryRunner, 'login_activity', 'login_mode', 'varchar(25)') + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE \`organization\` DROP COLUMN \`sso_config\`;`) + await queryRunner.query(`ALTER TABLE \`user\` DROP COLUMN \`user_type\`;`) + await queryRunner.query(`ALTER TABLE \`login_activity\` DROP COLUMN \`login_mode\`;`) + } +} diff --git a/packages/server/src/enterprise/database/migrations/mariadb/1734074497540-AddPersonalWorkspace.ts b/packages/server/src/enterprise/database/migrations/mariadb/1734074497540-AddPersonalWorkspace.ts new file mode 100644 index 000000000..0a2725779 --- /dev/null +++ 
b/packages/server/src/enterprise/database/migrations/mariadb/1734074497540-AddPersonalWorkspace.ts @@ -0,0 +1,29 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' +import { v4 as uuidv4 } from 'uuid' + +export class AddPersonalWorkspace1734074497540 implements MigrationInterface { + name = 'AddPersonalWorkspace1734074497540' + + public async up(queryRunner: QueryRunner): Promise { + const users = await queryRunner.query(`select * from \`user\`;`) + const organization = await queryRunner.query(`select \`id\` from \`organization\`;`) + for (let user of users) { + const workspaceDescription = 'Personal Workspace of ' + user.id + const workspaceId = uuidv4() + + await queryRunner.query(` + insert into \`workspace\` (\`id\`, \`name\`, \`description\`, \`organizationId\`) + values('${workspaceId}', 'Personal Workspace', '${workspaceDescription}', '${organization[0].id}'); + `) + + const workspaceUsersId = uuidv4() + + await queryRunner.query(` + insert into \`workspace_users\` (\`id\`, \`workspaceId\`, \`userId\`, \`role\`) + values('${workspaceUsersId}', '${workspaceId}', '${user.id}', 'pw'); + `) + } + } + + public async down(): Promise {} +} diff --git a/packages/server/src/enterprise/database/migrations/mariadb/1737076223692-RefactorEnterpriseDatabase.ts b/packages/server/src/enterprise/database/migrations/mariadb/1737076223692-RefactorEnterpriseDatabase.ts new file mode 100644 index 000000000..f571a44ae --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/mariadb/1737076223692-RefactorEnterpriseDatabase.ts @@ -0,0 +1,494 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' +import { decrypt, encrypt } from '../../../utils/encryption.util' +import { LoginMethodStatus } from '../../entities/login-method.entity' +import { OrganizationUserStatus } from '../../entities/organization-user.entity' +import { OrganizationName } from '../../entities/organization.entity' +import { GeneralRole } from '../../entities/role.entity' +import { 
UserStatus } from '../../entities/user.entity' +import { WorkspaceUserStatus } from '../../entities/workspace-user.entity' +import { WorkspaceName } from '../../entities/workspace.entity' + +export class RefactorEnterpriseDatabase1737076223692 implements MigrationInterface { + name = 'RefactorEnterpriseDatabase1737076223692' + private async modifyTable(queryRunner: QueryRunner): Promise { + /*------------------------------------- + --------------- user ----------------- + --------------------------------------*/ + // rename user table to temp_user + await queryRunner.query(`alter table \`user\` rename to \`temp_user\`;`) + + // create user table + await queryRunner.query(` + create table \`user\` ( + \`id\` varchar(36) default (uuid()) primary key, + \`name\` varchar(100) not null, + \`email\` varchar(255) not null unique, + \`credential\` text null, + \`tempToken\` text null, + \`tokenExpiry\` timestamp null, + \`status\` varchar(20) default '${UserStatus.UNVERIFIED}' not null, + \`createdDate\` timestamp default now() not null, + \`updatedDate\` timestamp default now() not null, + \`createdBy\` varchar(36) not null, + \`updatedBy\` varchar(36) not null, + constraint \`fk_user_createdBy\` foreign key (\`createdBy\`) references \`user\` (\`id\`), + constraint \`fk_user_updatedBy\` foreign key (\`updatedBy\`) references \`user\` (\`id\`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_520_ci;; + `) + + /*------------------------------------- + ----------- organization -------------- + --------------------------------------*/ + // rename organization table to temp_organization + await queryRunner.query(`alter table \`organization\` rename to \`temp_organization\`;`) + + // create organization table + await queryRunner.query(` + create table \`organization\` ( + \`id\` varchar(36) default (uuid()) primary key, + \`name\` varchar(100) default '${OrganizationName.DEFAULT_ORGANIZATION}' not null, + \`customerId\` varchar(100) null, + \`subscriptionId\` 
varchar(100) null, + \`createdDate\` timestamp default now() not null, + \`updatedDate\` timestamp default now() not null, + \`createdBy\` varchar(36) not null, + \`updatedBy\` varchar(36) not null, + constraint \`fk_organization_createdBy\` foreign key (\`createdBy\`) references \`user\` (\`id\`), + constraint \`fk_organization_updatedBy\` foreign key (\`updatedBy\`) references \`user\` (\`id\`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_520_ci;; + `) + + /*------------------------------------- + ----------- login method -------------- + --------------------------------------*/ + // create login_method table + await queryRunner.query(` + create table \`login_method\` ( + \`id\` varchar(36) default (uuid()) primary key, + \`organizationId\` varchar(36) null, + \`name\` varchar(100) not null, + \`config\` text not null, + \`status\` varchar(20) default '${LoginMethodStatus.ENABLE}' not null, + \`createdDate\` timestamp default now() not null, + \`updatedDate\` timestamp default now() not null, + \`createdBy\` varchar(36) null, + \`updatedBy\` varchar(36) null, + constraint \`fk_login_method_organizationId\` foreign key (\`organizationId\`) references \`organization\` (\`id\`), + constraint \`fk_login_method_createdBy\` foreign key (\`createdBy\`) references \`user\` (\`id\`), + constraint \`fk_login_method_updatedBy\` foreign key (\`updatedBy\`) references \`user\` (\`id\`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_520_ci;; + `) + + /*------------------------------------- + --------------- role ------------------ + --------------------------------------*/ + // rename roles table to temp_role + await queryRunner.query(`alter table \`roles\` rename to \`temp_role\`;`) + + // create organization_login_method table + await queryRunner.query(` + create table \`role\` ( + \`id\` varchar(36) default (uuid()) primary key, + \`organizationId\` varchar(36) null, + \`name\` varchar(100) not null, + \`description\` text null, + 
\`permissions\` text not null, + \`createdDate\` timestamp default now() not null, + \`updatedDate\` timestamp default now() not null, + \`createdBy\` varchar(36) null, + \`updatedBy\` varchar(36) null, + constraint \`fk_role_organizationId\` foreign key (\`organizationId\`) references \`organization\` (\`id\`), + constraint \`fk_role_createdBy\` foreign key (\`createdBy\`) references \`user\` (\`id\`), + constraint \`fk_role_updatedBy\` foreign key (\`updatedBy\`) references \`user\` (\`id\`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_520_ci;; + `) + + /*------------------------------------- + ---------- organization_user ---------- + --------------------------------------*/ + // create organization_user table + await queryRunner.query(` + create table \`organization_user\` ( + \`organizationId\` varchar(36) not null, + \`userId\` varchar(36) not null, + \`roleId\` varchar(36) not null, + \`status\` varchar(20) default '${OrganizationUserStatus.ACTIVE}' not null, + \`createdDate\` timestamp default now() not null, + \`updatedDate\` timestamp default now() not null, + \`createdBy\` varchar(36) not null, + \`updatedBy\` varchar(36) not null, + constraint \`pk_organization_user\` primary key (\`organizationId\`, \`userId\`), + constraint \`fk_organization_user_organizationId\` foreign key (\`organizationId\`) references \`organization\` (\`id\`), + constraint \`fk_organization_user_userId\` foreign key (\`userId\`) references \`user\` (\`id\`), + constraint \`fk_organization_user_roleId\` foreign key (\`roleId\`) references \`role\` (\`id\`), + constraint \`fk_organization_user_createdBy\` foreign key (\`createdBy\`) references \`user\` (\`id\`), + constraint \`fk_organization_user_updatedBy\` foreign key (\`updatedBy\`) references \`user\` (\`id\`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_520_ci;; + `) + + /*------------------------------------- + ------------- workspace --------------- + 
--------------------------------------*/ + // modify workspace table + await queryRunner.query(` + alter table \`workspace\` + drop constraint \`fk_workspace_organizationId\`; + `) + + await queryRunner.query(` + alter table \`workspace\` + modify column \`organizationId\` varchar(36) not null, + modify column \`name\` varchar(100), + modify column \`description\` text; + `) + + await queryRunner.query(` + alter table \`workspace\` + add column \`createdBy\` varchar(36) null, + add column \`updatedBy\` varchar(36) null; + `) + + // remove first if needed will be add back, will cause insert to slow + await queryRunner.query(` + drop index \`idx_workspace_organizationId\` on \`workspace\`; + `) + + /*------------------------------------- + ----------- workspace_user ------------ + --------------------------------------*/ + // rename workspace_users table to temp_workspace_user + await queryRunner.query(`alter table \`workspace_users\` rename to \`temp_workspace_user\`;`) + + // create workspace_user table + await queryRunner.query(` + create table \`workspace_user\` ( + \`workspaceId\` varchar(36) not null, + \`userId\` varchar(36) not null, + \`roleId\` varchar(36) not null, + \`status\` varchar(20) default '${WorkspaceUserStatus.INVITED}' not null, + \`lastLogin\` timestamp null, + \`createdDate\` timestamp default now() not null, + \`updatedDate\` timestamp default now() not null, + \`createdBy\` varchar(36) not null, + \`updatedBy\` varchar(36) not null, + constraint \`pk_workspace_user\` primary key (\`workspaceId\`, \`userId\`), + constraint \`fk_workspace_user_workspaceId\` foreign key (\`workspaceId\`) references \`workspace\` (\`id\`), + constraint \`fk_workspace_user_userId\` foreign key (\`userId\`) references \`user\` (\`id\`), + constraint \`fk_workspace_user_roleId\` foreign key (\`roleId\`) references \`role\` (\`id\`), + constraint \`fk_workspace_user_createdBy\` foreign key (\`createdBy\`) references \`user\` (\`id\`), + constraint 
\`fk_workspace_user_updatedBy\` foreign key (\`updatedBy\`) references \`user\` (\`id\`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_520_ci;; + `) + } + + private async deleteWorkspaceWithoutUser(queryRunner: QueryRunner) { + const workspaceWithoutUser = await queryRunner.query(` + select w.\`id\` as \`id\` from \`workspace_user\` as \`wu\` + right join \`workspace\` as \`w\` on \`wu\`.\`workspaceId\` = \`w\`.\`id\` + where \`wu\`.\`userId\` is null; + `) + const workspaceIds = workspaceWithoutUser.map((workspace: { id: string }) => `'${workspace.id}'`).join(',') + + // Delete related records from other tables that reference the deleted workspaces + if (workspaceIds && workspaceIds.length > 0) { + await queryRunner.query(` + delete from \`workspace_user\` where \`workspaceId\` in (${workspaceIds}); + `) + await queryRunner.query(` + delete from \`apikey\` where \`workspaceId\` in (${workspaceIds}); + `) + await queryRunner.query(` + delete from \`assistant\` where \`workspaceId\` in (${workspaceIds}); + `) + const chatflows = await queryRunner.query(` + select id from \`chat_flow\` where \`workspaceId\` in (${workspaceIds}); + `) + const chatflowIds = chatflows.map((chatflow: { id: string }) => `'${chatflow.id}'`).join(',') + if (chatflowIds && chatflowIds.length > 0) { + await queryRunner.query(` + delete from \`chat_flow\` where \`workspaceId\` in (${workspaceIds}); + `) + await queryRunner.query(` + delete from \`upsert_history\` where \`chatflowid\` in (${chatflowIds}); + `) + await queryRunner.query(` + delete from \`chat_message\` where \`chatflowid\` in (${chatflowIds}); + `) + await queryRunner.query(` + delete from \`chat_message_feedback\` where \`chatflowid\` in (${chatflowIds}); + `) + } + await queryRunner.query(` + delete from \`credential\` where \`workspaceId\` in (${workspaceIds}); + `) + await queryRunner.query(` + delete from \`custom_template\` where \`workspaceId\` in (${workspaceIds}); + `) + const datasets = await 
queryRunner.query(` + select id from \`dataset\` where \`workspaceId\` in (${workspaceIds}); + `) + const datasetIds = datasets.map((dataset: { id: string }) => `'${dataset.id}'`).join(',') + if (datasetIds && datasetIds.length > 0) { + await queryRunner.query(` + delete from \`dataset\` where \`workspaceId\` in (${workspaceIds}); + `) + await queryRunner.query(` + delete from \`dataset_row\` where \`datasetId\` in (${datasetIds}); + `) + } + const documentStores = await queryRunner.query(` + select id from \`document_store\` where \`workspaceId\` in (${workspaceIds}); + `) + const documentStoreIds = documentStores.map((documentStore: { id: string }) => `'${documentStore.id}'`).join(',') + if (documentStoreIds && documentStoreIds.length > 0) { + await queryRunner.query(` + delete from \`document_store\` where \`workspaceId\` in (${workspaceIds}); + `) + await queryRunner.query(` + delete from \`document_store_file_chunk\` where \`storeId\` in (${documentStoreIds}); + `) + } + const evaluations = await queryRunner.query(` + select id from \`evaluation\` where \`workspaceId\` in (${workspaceIds}); + `) + const evaluationIds = evaluations.map((evaluation: { id: string }) => `'${evaluation.id}'`).join(',') + if (evaluationIds && evaluationIds.length > 0) { + await queryRunner.query(` + delete from \`evaluation\` where \`workspaceId\` in (${workspaceIds}); + `) + await queryRunner.query(` + delete from \`evaluation_run\` where \`evaluationId\` in (${evaluationIds}); + `) + } + await queryRunner.query(` + delete from \`evaluator\` where \`workspaceId\` in (${workspaceIds}); + `) + await queryRunner.query(` + delete from \`tool\` where \`workspaceId\` in (${workspaceIds}); + `) + await queryRunner.query(` + delete from \`variable\` where \`workspaceId\` in (${workspaceIds}); + `) + await queryRunner.query(` + delete from \`workspace_shared\` where \`workspaceId\` in (${workspaceIds}); + `) + await queryRunner.query(` + delete from \`workspace\` where \`id\` in 
(${workspaceIds}); + `) + } + } + + private async populateTable(queryRunner: QueryRunner): Promise { + // insert generalRole + const generalRole = [ + { + name: 'owner', + description: 'Has full control over the organization.', + permissions: '["organization","workspace"]' + }, + { + name: 'member', + description: 'Has limited control over the organization.', + permissions: '[]' + }, + { + name: 'personal workspace', + description: 'Has full control over the personal workspace', + permissions: + '[ "chatflows:view", "chatflows:create", "chatflows:update", "chatflows:duplicate", "chatflows:delete", "chatflows:export", "chatflows:import", "chatflows:config", "chatflows:domains", "agentflows:view", "agentflows:create", "agentflows:update", "agentflows:duplicate", "agentflows:delete", "agentflows:export", "agentflows:import", "agentflows:config", "agentflows:domains", "tools:view", "tools:create", "tools:update", "tools:delete", "tools:export", "assistants:view", "assistants:create", "assistants:update", "assistants:delete", "credentials:view", "credentials:create", "credentials:update", "credentials:delete", "credentials:share", "variables:view", "variables:create", "variables:update", "variables:delete", "apikeys:view", "apikeys:create", "apikeys:update", "apikeys:delete", "apikeys:import", "documentStores:view", "documentStores:create", "documentStores:update", "documentStores:delete", "documentStores:add-loader", "documentStores:delete-loader", "documentStores:preview-process", "documentStores:upsert-config", "datasets:view", "datasets:create", "datasets:update", "datasets:delete", "evaluators:view", "evaluators:create", "evaluators:update", "evaluators:delete", "evaluations:view", "evaluations:create", "evaluations:update", "evaluations:delete", "evaluations:run", "templates:marketplace", "templates:custom", "templates:custom-delete", "templates:toolexport", "templates:flowexport", "templates:custom-share", "workspace:export", "workspace:import", 
"executions:view", "executions:delete" ]' + } + ] + for (let role of generalRole) { + await queryRunner.query(` + insert into \`role\`(\`name\`, \`description\`, \`permissions\`) + values('${role.name}', '${role.description}', '${role.permissions}'); + `) + } + + const users = await queryRunner.query('select * from `temp_user`;') + const noExistingData = users.length > 0 === false + if (noExistingData) return + + const organizations = await queryRunner.query('select * from `temp_organization`;') + const organizationId = organizations[0].id + const adminUserId = organizations[0].adminUserId + const ssoConfig = organizations[0].sso_config ? JSON.parse(await decrypt(organizations[0].sso_config)).providers : [] + + /*------------------------------------- + --------------- user ----------------- + --------------------------------------*/ + // insert admin user first + await queryRunner.query(` + insert into \`user\` (\`id\`, \`name\`, \`email\`, \`credential\`, \`tempToken\`, \`tokenExpiry\`, \`status\`, \`createdBy\`, \`updatedBy\`) + select tu.\`id\`, coalesce(tu.\`name\`, tu.\`email\`), tu.\`email\`, tu.\`credential\`, tu.\`tempToken\`, tu.\`tokenExpiry\`, tu.\`status\`, + '${adminUserId}', '${adminUserId}' + from \`temp_user\` as \`tu\` where tu.\`id\` = '${adminUserId}'; + `) + + // insert user with temp_user data + await queryRunner.query(` + insert into \`user\` (\`id\`, \`name\`, \`email\`, \`credential\`, \`tempToken\`, \`tokenExpiry\`, \`status\`, \`createdBy\`, \`updatedBy\`) + select tu.\`id\`, coalesce(tu.\`name\`, tu.\`email\`), tu.\`email\`, tu.\`credential\`, tu.\`tempToken\`, tu.\`tokenExpiry\`, tu.\`status\`, + '${adminUserId}', '${adminUserId}' + from \`temp_user\` as \`tu\` where tu.\`id\` != '${adminUserId}'; + `) + + /*------------------------------------- + ----------- organization -------------- + --------------------------------------*/ + // insert organization with temp_organization data + await queryRunner.query(` + insert into 
\`organization\` (\`id\`, \`name\`, \`createdBy\`, \`updatedBy\`) + select \`id\`, \`name\`, \`adminUserId\`, \`adminUserId\` from \`temp_organization\`; + `) + + /*------------------------------------- + ----------- login method -------------- + --------------------------------------*/ + // insert login_method with temp_organization data + for (let config of ssoConfig) { + const newConfigFormat = { + domain: config.domain === '' || config.domain === undefined ? undefined : config.domain, + tenantID: config.tenantID === '' || config.tenantID === undefined ? undefined : config.tenantID, + clientID: config.clientID === '' || config.clientID === undefined ? undefined : config.clientID, + clientSecret: config.clientSecret === '' || config.clientSecret === undefined ? undefined : config.clientSecret + } + const status = config.configEnabled === true ? LoginMethodStatus.ENABLE : LoginMethodStatus.DISABLE + + const allUndefined = Object.values(newConfigFormat).every((value) => value === undefined) + if (allUndefined && status === LoginMethodStatus.DISABLE) continue + const encryptData = await encrypt(JSON.stringify(newConfigFormat)) + + await queryRunner.query(` + insert into \`login_method\` (\`organizationId\`, \`name\`, \`config\`, \`status\`, \`createdBy\`, \`updatedBy\`) + values('${organizationId}','${config.providerName}','${encryptData}','${status}','${adminUserId}','${adminUserId}'); + `) + } + + /*------------------------------------- + --------------- role ------------------ + --------------------------------------*/ + // insert workspace role into role + const workspaceRole = await queryRunner.query(`select \`id\`, \`name\`, \`description\`, \`permissions\` from \`temp_role\`;`) + for (let role of workspaceRole) { + role.permissions = JSON.stringify(role.permissions.split(',').filter((permission: string) => permission.trim() !== '')) + const haveDescriptionQuery = `insert into \`role\` (\`id\`, \`organizationId\`, \`name\`, \`description\`, \`permissions\`, 
\`createdBy\`, \`updatedBy\`) + values('${role.id}','${organizationId}','${role.name}','${role.description}','${role.permissions}','${adminUserId}','${adminUserId}');` + const noHaveDescriptionQuery = `insert into \`role\` (\`id\`, \`organizationId\`, \`name\`, \`permissions\`, \`createdBy\`, \`updatedBy\`) + values('${role.id}','${organizationId}','${role.name}','${role.permissions}','${adminUserId}','${adminUserId}');` + const insertRoleQuery = role.description ? haveDescriptionQuery : noHaveDescriptionQuery + await queryRunner.query(insertRoleQuery) + } + + /*------------------------------------- + ---------- organization_user ---------- + --------------------------------------*/ + const roles = await queryRunner.query('select * from `role`;') + // insert organization_user with user, role and temp_organization data + for (let user of users) { + const roleId = + user.id === adminUserId + ? roles.find((role: any) => role.name === GeneralRole.OWNER).id + : roles.find((role: any) => role.name === GeneralRole.MEMBER).id + await queryRunner.query(` + insert into \`organization_user\` (\`organizationId\`, \`userId\`, \`roleId\`, \`status\`, \`createdBy\`, \`updatedBy\`) + values ('${organizationId}','${user.id}','${roleId}','${user.status}','${adminUserId}','${adminUserId}'); + `) + } + + /*------------------------------------- + ------------- workspace --------------- + --------------------------------------*/ + const workspaces = await queryRunner.query('select * from `workspace`;') + for (let workspace of workspaces) { + await queryRunner.query( + `update \`workspace\` set \`createdBy\` = '${adminUserId}', \`updatedBy\` = '${adminUserId}' where \`id\` = '${workspace.id}';` + ) + } + + /*------------------------------------- + ----------- workspace_user ------------ + --------------------------------------*/ + const workspaceUsers = await queryRunner.query('select * from `temp_workspace_user`;') + for (let workspaceUser of workspaceUsers) { + switch 
(workspaceUser.role) { + case 'org_admin': + workspaceUser.role = roles.find((role: any) => role.name === GeneralRole.OWNER).id + break + case 'pw': + workspaceUser.role = roles.find((role: any) => role.name === GeneralRole.PERSONAL_WORKSPACE).id + break + default: + workspaceUser.role = roles.find((role: any) => role.name === workspaceUser.role).id + break + } + const user = users.find((user: any) => user.id === workspaceUser.userId) + const workspace = workspaces.find((workspace: any) => workspace.id === workspaceUser.workspaceId) + if (workspaceUser.workspaceId === user.activeWorkspaceId && user.lastLogin) { + const lastLogin = new Date(user.lastLogin).toISOString().replace('T', ' ').slice(0, 19) + await queryRunner.query(` + insert into \`workspace_user\` (\`workspaceId\`, \`userId\`, \`roleId\`, \`status\`, \`lastLogin\`,\`createdBy\`, \`updatedBy\`) + values ('${workspaceUser.workspaceId}','${workspaceUser.userId}','${workspaceUser.role}','${WorkspaceUserStatus.ACTIVE}','${lastLogin}','${adminUserId}','${adminUserId}'); + `) + } else if (workspace.name === WorkspaceName.DEFAULT_PERSONAL_WORKSPACE && !user.lastLogin) { + // Skip personal workspaces for users who haven't signed up yet to avoid duplicates when they sign up. + // account.service.ts creates personal workspace during sign-up. 
+ await queryRunner.query(` + delete from \`temp_workspace_user\` where \`workspaceId\` = '${workspaceUser.workspaceId}' and \`userId\` = '${workspaceUser.userId}'; + `) + await queryRunner.query(` + delete from \`workspace\` where \`id\` = '${workspaceUser.workspaceId}'; + `) + } else { + await queryRunner.query(` + insert into \`workspace_user\` (\`workspaceId\`, \`userId\`, \`roleId\`, \`status\`,\`createdBy\`, \`updatedBy\`) + values ('${workspaceUser.workspaceId}','${workspaceUser.userId}','${workspaceUser.role}','${WorkspaceUserStatus.INVITED}','${adminUserId}','${adminUserId}'); + `) + } + } + + await this.deleteWorkspaceWithoutUser(queryRunner) + } + + private async deleteTempTable(queryRunner: QueryRunner): Promise { + await queryRunner.query(` + drop table \`temp_workspace_user\`; + `) + await queryRunner.query(` + drop table \`temp_role\`; + `) + await queryRunner.query(` + drop table \`temp_organization\`; + `) + await queryRunner.query(` + drop table \`temp_user\`; + `) + } + + public async up(queryRunner: QueryRunner): Promise { + await this.modifyTable(queryRunner) + await this.populateTable(queryRunner) + await this.deleteTempTable(queryRunner) + + // This query cannot be part of the modifyTable function because: + // 1. The \`organizationId\` in the \`workspace\` table might be referencing data in the \`temp_organization\` table, so it must be altered last. + // 2. Setting \`createdBy\` and \`updatedBy\` to NOT NULL needs to happen after ensuring thereโ€™s no existing data that would violate the constraint, + // because altering these columns while there is data could prevent new records from being inserted into the \`workspace\` table. 
+ await queryRunner.query(` + alter table \`workspace\` + modify column \`createdBy\` varchar(36) not null, + modify column \`updatedBy\` varchar(36) not null, + add constraint \`fk_organizationId\` foreign key (\`organizationId\`) references \`organization\` (\`id\`), + add constraint \`fk_workspace_createdBy\` foreign key (\`createdBy\`) references \`user\` (\`id\`), + add constraint \`fk_workspace_updatedBy\` foreign key (\`updatedBy\`) references \`user\` (\`id\`); + `) + + // modify evaluation table for average_metrics column to be nullable + await queryRunner.query(` + alter table \`evaluation\` + modify column \`average_metrics\` longtext null; + `) + } + + public async down(): Promise {} +} diff --git a/packages/server/src/enterprise/database/migrations/mariadb/1746862866554-ExecutionLinkWorkspaceId.ts b/packages/server/src/enterprise/database/migrations/mariadb/1746862866554-ExecutionLinkWorkspaceId.ts new file mode 100644 index 000000000..44d4e8855 --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/mariadb/1746862866554-ExecutionLinkWorkspaceId.ts @@ -0,0 +1,30 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' +import { ensureColumnExists } from './mariaDbCustomFunctions' + +export class ExecutionLinkWorkspaceId1746862866554 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + // step 1 - add workspaceId column + await ensureColumnExists(queryRunner, 'execution', 'workspaceId', 'varchar(36)') + + // step 2 - add index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`execution\` + ADD INDEX \`idx_execution_workspaceId\` (\`workspaceId\`), + ADD CONSTRAINT \`fk_execution_workspaceId\` + FOREIGN KEY (\`workspaceId\`) + REFERENCES \`workspace\`(\`id\`); + `) + } + + public async down(queryRunner: QueryRunner): Promise { + // step 1 - drop index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`execution\` + DROP INDEX 
\`idx_execution_workspaceId\`, + DROP FOREIGN KEY \`fk_execution_workspaceId\`; + `) + + // step 2 - drop workspaceId column + await queryRunner.query(`ALTER TABLE \`execution\` DROP COLUMN \`workspaceId\`;`) + } +} diff --git a/packages/server/src/enterprise/database/migrations/mariadb/mariaDbCustomFunctions.ts b/packages/server/src/enterprise/database/migrations/mariadb/mariaDbCustomFunctions.ts new file mode 100644 index 000000000..05bc715bd --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/mariadb/mariaDbCustomFunctions.ts @@ -0,0 +1,26 @@ +import { QueryRunner } from 'typeorm' + +export const ensureColumnExists = async ( + queryRunner: QueryRunner, + tableName: string, + columnName: string, + columnType: string // Accept column type as a parameter +): Promise => { + // Check if the specified column exists in the given table + const columnCheck = await queryRunner.query( + ` + SELECT COLUMN_NAME + FROM information_schema.COLUMNS + WHERE TABLE_NAME = ? AND COLUMN_NAME = ? AND TABLE_SCHEMA = ? 
+ `, + [tableName, columnName, queryRunner.connection.options.database] + ) + + // Check if the column exists + const columnExists = columnCheck.length > 0 + + if (!columnExists) { + // Add the column if it does not exist + await queryRunner.query(`ALTER TABLE ${tableName} ADD COLUMN ${columnName} ${columnType};`) + } +} diff --git a/packages/server/src/enterprise/database/migrations/mysql/1720230151482-AddAuthTables.ts b/packages/server/src/enterprise/database/migrations/mysql/1720230151482-AddAuthTables.ts new file mode 100644 index 000000000..408b0f533 --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/mysql/1720230151482-AddAuthTables.ts @@ -0,0 +1,46 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddAuthTables1720230151482 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS \`user\` ( + \`id\` varchar(36) NOT NULL, + \`name\` varchar(255), + \`role\` varchar(20) NOT NULL, + \`status\` varchar(20) NOT NULL, + \`email\` varchar(100) NOT NULL, + \`credential\` text, + \`tempToken\` text, + \`tokenExpiry\` datetime(6), + \`activeWorkspaceId\` varchar(100), + \`lastLogin\` datetime(6), + PRIMARY KEY (\`id\`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;` + ) + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS \`roles\` ( + \`id\` varchar(36) NOT NULL, + \`name\` varchar(255), + \`description\` varchar(255), + \`permissions\` text, + PRIMARY KEY (\`id\`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;` + ) + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS \`login_activity\` ( + \`id\` varchar(36) NOT NULL, + \`username\` varchar(255), + \`message\` varchar(255) NOT NULL, + \`activity_code\` INT NOT NULL, + \`attemptedDateTime\` datetime(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6) ON UPDATE CURRENT_TIMESTAMP(6), + PRIMARY KEY (\`id\`) + ) ENGINE=InnoDB DEFAULT 
CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;` + ) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`DROP TABLE user`) + await queryRunner.query(`DROP TABLE roles`) + await queryRunner.query(`DROP TABLE login_activity`) + } +} diff --git a/packages/server/src/enterprise/database/migrations/mysql/1720230151484-AddWorkspace.ts b/packages/server/src/enterprise/database/migrations/mysql/1720230151484-AddWorkspace.ts new file mode 100644 index 000000000..674502c32 --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/mysql/1720230151484-AddWorkspace.ts @@ -0,0 +1,52 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' +import { ensureColumnExists } from './mysqlCustomFunctions' + +export class AddWorkspace1720230151484 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS \`workspace\` ( + \`id\` varchar(36) NOT NULL, + \`name\` varchar(255) NOT NULL, + \`description\` varchar(255) DEFAULT NULL, + \`createdDate\` datetime(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6), + \`updatedDate\` datetime(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6) ON UPDATE CURRENT_TIMESTAMP(6), + PRIMARY KEY (\`id\`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;` + ) + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS \`workspace_users\` ( + \`id\` varchar(36) NOT NULL, + \`workspaceId\` varchar(36) NOT NULL, + \`userId\` varchar(50) NOT NULL, + \`role\` varchar(20) DEFAULT NULL, + PRIMARY KEY (\`id\`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;` + ) + + await ensureColumnExists(queryRunner, 'chat_flow', 'workspaceId', 'varchar(36)') + await ensureColumnExists(queryRunner, 'tool', 'workspaceId', 'varchar(36)') + await ensureColumnExists(queryRunner, 'assistant', 'workspaceId', 'varchar(36)') + await ensureColumnExists(queryRunner, 'credential', 'workspaceId', 'varchar(36)') + await 
ensureColumnExists(queryRunner, 'document_store', 'workspaceId', 'varchar(36)') + await ensureColumnExists(queryRunner, 'evaluation', 'workspaceId', 'varchar(36)') + await ensureColumnExists(queryRunner, 'evaluator', 'workspaceId', 'varchar(36)') + await ensureColumnExists(queryRunner, 'dataset', 'workspaceId', 'varchar(36)') + await ensureColumnExists(queryRunner, 'apikey', 'workspaceId', 'varchar(36)') + await ensureColumnExists(queryRunner, 'variable', 'workspaceId', 'varchar(36)') + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`DROP TABLE workspace`) + await queryRunner.query(`DROP TABLE workspace_users`) + + await queryRunner.query(`ALTER TABLE \`chat_message\` DROP COLUMN \`workspaceId\`;`) + await queryRunner.query(`ALTER TABLE \`tool\` DROP COLUMN \`workspaceId\`;`) + await queryRunner.query(`ALTER TABLE \`assistant\` DROP COLUMN \`workspaceId\`;`) + await queryRunner.query(`ALTER TABLE \`credential\` DROP COLUMN \`workspaceId\`;`) + await queryRunner.query(`ALTER TABLE \`document_store\` DROP COLUMN \`workspaceId\`;`) + await queryRunner.query(`ALTER TABLE \`evaluation\` DROP COLUMN \`workspaceId\`;`) + await queryRunner.query(`ALTER TABLE \`dataset\` DROP COLUMN \`workspaceId\`;`) + await queryRunner.query(`ALTER TABLE \`apikey\` DROP COLUMN \`workspaceId\`;`) + await queryRunner.query(`ALTER TABLE \`variable\` DROP COLUMN \`workspaceId\`;`) + } +} diff --git a/packages/server/src/enterprise/database/migrations/mysql/1726654922034-AddWorkspaceShared.ts b/packages/server/src/enterprise/database/migrations/mysql/1726654922034-AddWorkspaceShared.ts new file mode 100644 index 000000000..2ed824280 --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/mysql/1726654922034-AddWorkspaceShared.ts @@ -0,0 +1,21 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddWorkspaceShared1726654922034 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + 
await queryRunner.query( + `CREATE TABLE IF NOT EXISTS \`workspace_shared\` ( + \`id\` varchar(36) NOT NULL, + \`workspaceId\` varchar(36) NOT NULL, + \`sharedItemId\` varchar(50) NOT NULL, + \`itemType\` varchar(50) NOT NULL, + \`createdDate\` datetime(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6), + \`updatedDate\` datetime(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6) ON UPDATE CURRENT_TIMESTAMP(6), + PRIMARY KEY (\`id\`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;` + ) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`DROP TABLE workspace_shared`) + } +} diff --git a/packages/server/src/enterprise/database/migrations/mysql/1726655750383-AddWorkspaceIdToCustomTemplate.ts b/packages/server/src/enterprise/database/migrations/mysql/1726655750383-AddWorkspaceIdToCustomTemplate.ts new file mode 100644 index 000000000..c6b6c2ab2 --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/mysql/1726655750383-AddWorkspaceIdToCustomTemplate.ts @@ -0,0 +1,11 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddWorkspaceIdToCustomTemplate1726655750383 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE \`custom_template\` ADD COLUMN \`workspaceId\` varchar(36);`) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE \`custom_template\` DROP COLUMN \`workspaceId\`;`) + } +} diff --git a/packages/server/src/enterprise/database/migrations/mysql/1727798417345-AddOrganization.ts b/packages/server/src/enterprise/database/migrations/mysql/1727798417345-AddOrganization.ts new file mode 100644 index 000000000..dd88eca12 --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/mysql/1727798417345-AddOrganization.ts @@ -0,0 +1,26 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddOrganization1727798417345 implements 
MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS \`organization\` ( + \`id\` varchar(36) NOT NULL, + \`name\` varchar(255) NOT NULL, + \`adminUserId\` varchar(255) NULL, + \`defaultWsId\` varchar(255) NULL, + \`organization_type\` varchar(255) NULL, + \`createdDate\` timestamp(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6), + \`updatedDate\` timestamp(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6) ON UPDATE CURRENT_TIMESTAMP(6), + PRIMARY KEY (\`id\`), + KEY \`idx_organization_id\` (\`id\`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;` + ) + await queryRunner.query(`ALTER TABLE \`workspace\` ADD COLUMN \`organizationId\` varchar(36);`) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`DROP TABLE IF EXISTS \`organization\`;`) + + await queryRunner.query(`ALTER TABLE \`workspace\` DROP COLUMN \`organizationId\`;`) + } +} diff --git a/packages/server/src/enterprise/database/migrations/mysql/1729130948686-LinkWorkspaceId.ts b/packages/server/src/enterprise/database/migrations/mysql/1729130948686-LinkWorkspaceId.ts new file mode 100644 index 000000000..b866afb96 --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/mysql/1729130948686-LinkWorkspaceId.ts @@ -0,0 +1,233 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class LinkWorkspaceId1729130948686 implements MigrationInterface { + name = 'LinkWorkspaceId1729130948686' + + public async up(queryRunner: QueryRunner): Promise { + // step 1 - add index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`apikey\` + ADD INDEX \`idx_apikey_workspaceId\` (\`workspaceId\`), + ADD CONSTRAINT \`fk_apikey_workspaceId\` + FOREIGN KEY (\`workspaceId\`) + REFERENCES \`workspace\`(\`id\`); + `) + + // step 1 - add index and foreign key for activeWorkspaceId + await queryRunner.query(` + ALTER TABLE \`user\` + ADD INDEX 
\`idx_user_activeWorkspaceId\` (\`activeWorkspaceId\`), + ADD CONSTRAINT \`fk_user_activeWorkspaceId\` + FOREIGN KEY (\`activeWorkspaceId\`) + REFERENCES \`workspace\`(\`id\`); + `) + + // step 1 - add index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`workspace_users\` + ADD INDEX \`idx_workspace_users_workspaceId\` (\`workspaceId\`), + ADD CONSTRAINT \`fk_workspace_users_workspaceId\` + FOREIGN KEY (\`workspaceId\`) + REFERENCES \`workspace\`(\`id\`); + `) + + // step 1 - add index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`chat_flow\` + ADD INDEX \`idx_chat_flow_workspaceId\` (\`workspaceId\`), + ADD CONSTRAINT \`fk_chat_flow_workspaceId\` + FOREIGN KEY (\`workspaceId\`) + REFERENCES \`workspace\`(\`id\`); + `) + + // step 1 - add index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`tool\` + ADD INDEX \`idx_tool_workspaceId\` (\`workspaceId\`), + ADD CONSTRAINT \`fk_tool_workspaceId\` + FOREIGN KEY (\`workspaceId\`) + REFERENCES \`workspace\`(\`id\`); + `) + + // step 1 - add index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`assistant\` + ADD INDEX \`idx_assistant_workspaceId\` (\`workspaceId\`), + ADD CONSTRAINT \`fk_assistant_workspaceId\` + FOREIGN KEY (\`workspaceId\`) + REFERENCES \`workspace\`(\`id\`); + `) + + // step 1 - add index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`credential\` + ADD INDEX \`idx_credential_workspaceId\` (\`workspaceId\`), + ADD CONSTRAINT \`fk_credential_workspaceId\` + FOREIGN KEY (\`workspaceId\`) + REFERENCES \`workspace\`(\`id\`); + `) + + // step 1 - add index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`document_store\` + ADD INDEX \`idx_document_store_workspaceId\` (\`workspaceId\`), + ADD CONSTRAINT \`fk_document_store_workspaceId\` + FOREIGN KEY (\`workspaceId\`) + REFERENCES \`workspace\`(\`id\`); + `) + + // step 1 - add index and 
foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`evaluation\` + ADD INDEX \`idx_evaluation_workspaceId\` (\`workspaceId\`), + ADD CONSTRAINT \`fk_evaluation_workspaceId\` + FOREIGN KEY (\`workspaceId\`) + REFERENCES \`workspace\`(\`id\`); + `) + + // step 1 - add index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`evaluator\` + ADD INDEX \`idx_evaluator_workspaceId\` (\`workspaceId\`), + ADD CONSTRAINT \`fk_evaluator_workspaceId\` + FOREIGN KEY (\`workspaceId\`) + REFERENCES \`workspace\`(\`id\`); + `) + + // step 1 - add index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`dataset\` + ADD INDEX \`idx_dataset_workspaceId\` (\`workspaceId\`), + ADD CONSTRAINT \`fk_dataset_workspaceId\` + FOREIGN KEY (\`workspaceId\`) + REFERENCES \`workspace\`(\`id\`); + `) + + // step 1 - add index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`variable\` + ADD INDEX \`idx_variable_workspaceId\` (\`workspaceId\`), + ADD CONSTRAINT \`fk_variable_workspaceId\` + FOREIGN KEY (\`workspaceId\`) + REFERENCES \`workspace\`(\`id\`); + `) + + // step 1 - add index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`workspace_shared\` + ADD INDEX \`idx_workspace_shared_workspaceId\` (\`workspaceId\`), + ADD CONSTRAINT \`fk_workspace_shared_workspaceId\` + FOREIGN KEY (\`workspaceId\`) + REFERENCES \`workspace\`(\`id\`); + `) + + // step 1 - add index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`custom_template\` + ADD INDEX \`idx_custom_template_workspaceId\` (\`workspaceId\`), + ADD CONSTRAINT \`fk_custom_template_workspaceId\` + FOREIGN KEY (\`workspaceId\`) + REFERENCES \`workspace\`(\`id\`); + `) + } + + public async down(queryRunner: QueryRunner): Promise { + // step 1 - drop index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`apikey\` + DROP INDEX \`idx_apikey_workspaceId\`, + DROP FOREIGN KEY 
\`fk_apikey_workspaceId\`; + `) + + // step 1 - drop index and foreign key for activeWorkspaceId + await queryRunner.query(` + ALTER TABLE \`user\` + DROP INDEX \`idx_user_activeWorkspaceId\`, + DROP FOREIGN KEY \`fk_user_activeWorkspaceId\`; + `) + + // step 1 - drop index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`workspace_users\` + DROP INDEX \`idx_workspace_users_workspaceId\`, + DROP FOREIGN KEY \`fk_workspace_users_workspaceId\`; + `) + + // step 1 - drop index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`chat_flow\` + DROP INDEX \`idx_chat_flow_workspaceId\`, + DROP FOREIGN KEY \`fk_chat_flow_workspaceId\`; + `) + + // step 1 - drop index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`tool\` + DROP INDEX \`idx_tool_workspaceId\`, + DROP FOREIGN KEY \`fk_tool_workspaceId\`; + `) + + // step 1 - drop index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`assistant\` + DROP INDEX \`idx_assistant_workspaceId\`, + DROP FOREIGN KEY \`fk_assistant_workspaceId\`; + `) + + // step 1 - drop index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`credential\` + DROP INDEX \`idx_credential_workspaceId\`, + DROP FOREIGN KEY \`fk_credential_workspaceId\`; + `) + + // step 1 - drop index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`document_store\` + DROP INDEX \`idx_document_store_workspaceId\`, + DROP FOREIGN KEY \`fk_document_store_workspaceId\`; + `) + + // step 1 - drop index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`evaluation\` + DROP INDEX \`idx_evaluation_workspaceId\`, + DROP FOREIGN KEY \`fk_evaluation_workspaceId\`; + `) + + // step 1 - drop index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`evaluator\` + DROP INDEX \`idx_evaluator_workspaceId\`, + DROP FOREIGN KEY \`fk_evaluator_workspaceId\`; + `) + + // step 1 - drop 
index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`dataset\` + DROP INDEX \`idx_dataset_workspaceId\`, + DROP FOREIGN KEY \`fk_dataset_workspaceId\`; + `) + + // step 1 - drop index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`variable\` + DROP INDEX \`idx_variable_workspaceId\`, + DROP FOREIGN KEY \`fk_variable_workspaceId\`; + `) + + // step 1 - drop index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`workspace_shared\` + DROP INDEX \`idx_workspace_shared_workspaceId\`, + DROP FOREIGN KEY \`fk_workspace_shared_workspaceId\`; + `) + + // step 1 - drop index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`custom_template\` + DROP INDEX \`idx_custom_template_workspaceId\`, + DROP FOREIGN KEY \`fk_custom_template_workspaceId\`; + `) + } +} diff --git a/packages/server/src/enterprise/database/migrations/mysql/1729133111652-LinkOrganizationId.ts b/packages/server/src/enterprise/database/migrations/mysql/1729133111652-LinkOrganizationId.ts new file mode 100644 index 000000000..42ab342a7 --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/mysql/1729133111652-LinkOrganizationId.ts @@ -0,0 +1,25 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class LinkOrganizationId1729133111652 implements MigrationInterface { + name = 'LinkOrganizationId1729133111652' + + public async up(queryRunner: QueryRunner): Promise { + // step 1 - add index and foreign key for organizationId + await queryRunner.query(` + ALTER TABLE \`workspace\` + ADD INDEX \`idx_workspace_organizationId\` (\`organizationId\`), + ADD CONSTRAINT \`fk_workspace_organizationId\` + FOREIGN KEY (\`organizationId\`) + REFERENCES \`organization\`(\`id\`); + `) + } + + public async down(queryRunner: QueryRunner): Promise { + // step 1 - drop index and foreign key for organizationId + await queryRunner.query(` + ALTER TABLE \`workspace\` + DROP INDEX 
\`idx_workspace_organizationId\`, + DROP FOREIGN KEY \`fk_workspace_organizationId\`; + `) + } +} diff --git a/packages/server/src/enterprise/database/migrations/mysql/1730519457880-AddSSOColumns.ts b/packages/server/src/enterprise/database/migrations/mysql/1730519457880-AddSSOColumns.ts new file mode 100644 index 000000000..c94d29f91 --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/mysql/1730519457880-AddSSOColumns.ts @@ -0,0 +1,16 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' +import { ensureColumnExists } from './mysqlCustomFunctions' + +export class AddSSOColumns1730519457880 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await ensureColumnExists(queryRunner, 'organization', 'sso_config', 'text') + await ensureColumnExists(queryRunner, 'user', 'user_type', 'varchar(10)') + await ensureColumnExists(queryRunner, 'login_activity', 'login_mode', 'varchar(25)') + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE "organization" DROP COLUMN "sso_config";`) + await queryRunner.query(`ALTER TABLE "user" DROP COLUMN "user_type";`) + await queryRunner.query(`ALTER TABLE "login_activity" DROP COLUMN "login_mode";`) + } +} diff --git a/packages/server/src/enterprise/database/migrations/mysql/1734074497540-AddPersonalWorkspace.ts b/packages/server/src/enterprise/database/migrations/mysql/1734074497540-AddPersonalWorkspace.ts new file mode 100644 index 000000000..0a2725779 --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/mysql/1734074497540-AddPersonalWorkspace.ts @@ -0,0 +1,29 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' +import { v4 as uuidv4 } from 'uuid' + +export class AddPersonalWorkspace1734074497540 implements MigrationInterface { + name = 'AddPersonalWorkspace1734074497540' + + public async up(queryRunner: QueryRunner): Promise { + const users = await queryRunner.query(`select * from \`user\`;`) + 
        // assumes exactly one organization row exists at this point — TODO confirm
        const organization = await queryRunner.query(`select \`id\` from \`organization\`;`)
        for (let user of users) {
            const workspaceDescription = 'Personal Workspace of ' + user.id
            const workspaceId = uuidv4()

            await queryRunner.query(`
                insert into \`workspace\` (\`id\`, \`name\`, \`description\`, \`organizationId\`)
                values('${workspaceId}', 'Personal Workspace', '${workspaceDescription}', '${organization[0].id}');
            `)

            const workspaceUsersId = uuidv4()

            await queryRunner.query(`
                insert into \`workspace_users\` (\`id\`, \`workspaceId\`, \`userId\`, \`role\`)
                values('${workspaceUsersId}', '${workspaceId}', '${user.id}', 'pw');
            `)
        }
    }

    public async down(): Promise<void> {}
}

// ---- file: packages/server/src/enterprise/database/migrations/mysql/1737076223692-RefactorEnterpriseDatabase.ts ----
import { MigrationInterface, QueryRunner } from 'typeorm'
import { decrypt, encrypt } from '../../../utils/encryption.util'
import { LoginMethodStatus } from '../../entities/login-method.entity'
import { OrganizationUserStatus } from '../../entities/organization-user.entity'
import { OrganizationName } from '../../entities/organization.entity'
import { GeneralRole } from '../../entities/role.entity'
import { UserStatus } from '../../entities/user.entity'
import { WorkspaceUserStatus } from '../../entities/workspace-user.entity'
import { WorkspaceName } from '../../entities/workspace.entity'

/**
 * Rewrites the enterprise schema: renames the legacy tables to temp_*,
 * creates the new normalized tables (user, organization, login_method, role,
 * organization_user, workspace_user), migrates the legacy data into them,
 * deletes workspaces left without any user, then drops the temp tables.
 * Irreversible: down() is intentionally a no-op.
 */
export class RefactorEnterpriseDatabase1737076223692 implements MigrationInterface {
    name = 'RefactorEnterpriseDatabase1737076223692'

    // Renames legacy tables out of the way and creates the new schema.
    private async modifyTable(queryRunner: QueryRunner): Promise<void> {
        /*-------------------------------------
        --------------- user -----------------
        --------------------------------------*/
        // rename user table to temp_user
        await queryRunner.query(`alter table \`user\` rename to \`temp_user\`;`)

        // create user table
        await queryRunner.query(`
            create table \`user\` (
                \`id\` varchar(36) default (uuid()) primary key,
                \`name\` varchar(100) not null,
                \`email\` varchar(255) not null unique,
                \`credential\` text null,
                \`tempToken\` text null,
                \`tokenExpiry\` timestamp null,
                \`status\` varchar(20) default '${UserStatus.UNVERIFIED}' not null,
                \`createdDate\` timestamp default now() not null,
                \`updatedDate\` timestamp default now() not null,
                \`createdBy\` varchar(36) not null,
                \`updatedBy\` varchar(36) not null,
                constraint \`fk_user_createdBy\` foreign key (\`createdBy\`) references \`user\` (\`id\`),
                constraint \`fk_user_updatedBy\` foreign key (\`updatedBy\`) references \`user\` (\`id\`)
            ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
        `)

        /*-------------------------------------
        ----------- organization --------------
        --------------------------------------*/
        // rename organization table to temp_organization
        await queryRunner.query(`alter table \`organization\` rename to \`temp_organization\`;`)

        // create organization table
        await queryRunner.query(`
            create table \`organization\` (
                \`id\` varchar(36) default (uuid()) primary key,
                \`name\` varchar(100) default '${OrganizationName.DEFAULT_ORGANIZATION}' not null,
                \`customerId\` varchar(100) null,
                \`subscriptionId\` varchar(100) null,
                \`createdDate\` timestamp default now() not null,
                \`updatedDate\` timestamp default now() not null,
                \`createdBy\` varchar(36) not null,
                \`updatedBy\` varchar(36) not null,
                constraint \`fk_organization_createdBy\` foreign key (\`createdBy\`) references \`user\` (\`id\`),
                constraint \`fk_organization_updatedBy\` foreign key (\`updatedBy\`) references \`user\` (\`id\`)
            ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
        `)

        /*-------------------------------------
        ----------- login method --------------
        --------------------------------------*/
        // create login_method table
        await queryRunner.query(`
            create table \`login_method\` (
                \`id\` varchar(36) default (uuid()) primary key,
                \`organizationId\` varchar(36) null,
                \`name\` varchar(100) not null,
                \`config\` text not null,
                \`status\` varchar(20) default '${LoginMethodStatus.ENABLE}' not null,
                \`createdDate\` timestamp default now() not null,
                \`updatedDate\` timestamp default now() not null,
                \`createdBy\` varchar(36) null,
                \`updatedBy\` varchar(36) null,
                constraint \`fk_login_method_organizationId\` foreign key (\`organizationId\`) references \`organization\` (\`id\`),
                constraint \`fk_login_method_createdBy\` foreign key (\`createdBy\`) references \`user\` (\`id\`),
                constraint \`fk_login_method_updatedBy\` foreign key (\`updatedBy\`) references \`user\` (\`id\`)
            ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
        `)

        /*-------------------------------------
        --------------- role ------------------
        --------------------------------------*/
        // rename roles table to temp_role
        await queryRunner.query(`alter table \`roles\` rename to \`temp_role\`;`)

        // create role table
        await queryRunner.query(`
            create table \`role\` (
                \`id\` varchar(36) default (uuid()) primary key,
                \`organizationId\` varchar(36) null,
                \`name\` varchar(100) not null,
                \`description\` text null,
                \`permissions\` text not null,
                \`createdDate\` timestamp default now() not null,
                \`updatedDate\` timestamp default now() not null,
                \`createdBy\` varchar(36) null,
                \`updatedBy\` varchar(36) null,
                constraint \`fk_role_organizationId\` foreign key (\`organizationId\`) references \`organization\` (\`id\`),
                constraint \`fk_role_createdBy\` foreign key (\`createdBy\`) references \`user\` (\`id\`),
                constraint \`fk_role_updatedBy\` foreign key (\`updatedBy\`) references \`user\` (\`id\`)
            ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
        `)

        /*-------------------------------------
        ---------- organization_user ----------
        --------------------------------------*/
        // create organization_user table
        await queryRunner.query(`
            create table \`organization_user\` (
                \`organizationId\` varchar(36) not null,
                \`userId\` varchar(36) not null,
                \`roleId\` varchar(36) not null,
                \`status\` varchar(20) default '${OrganizationUserStatus.ACTIVE}' not null,
                \`createdDate\` timestamp default now() not null,
                \`updatedDate\` timestamp default now() not null,
                \`createdBy\` varchar(36) not null,
                \`updatedBy\` varchar(36) not null,
                constraint \`pk_organization_user\` primary key (\`organizationId\`, \`userId\`),
                constraint \`fk_organization_user_organizationId\` foreign key (\`organizationId\`) references \`organization\` (\`id\`),
                constraint \`fk_organization_user_userId\` foreign key (\`userId\`) references \`user\` (\`id\`),
                constraint \`fk_organization_user_roleId\` foreign key (\`roleId\`) references \`role\` (\`id\`),
                constraint \`fk_organization_user_createdBy\` foreign key (\`createdBy\`) references \`user\` (\`id\`),
                constraint \`fk_organization_user_updatedBy\` foreign key (\`updatedBy\`) references \`user\` (\`id\`)
            ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
        `)

        /*-------------------------------------
        ------------- workspace ---------------
        --------------------------------------*/
        // modify workspace table
        await queryRunner.query(`
            alter table \`workspace\`
            drop constraint \`fk_workspace_organizationId\`;
        `)

        await queryRunner.query(`
            alter table \`workspace\`
            modify column \`organizationId\` varchar(36) not null,
            modify column \`name\` varchar(100),
            modify column \`description\` text;
        `)

        await queryRunner.query(`
            alter table \`workspace\`
            add column \`createdBy\` varchar(36) null,
            add column \`updatedBy\` varchar(36) null;
        `)

        // drop the index for now — it slows the bulk inserts below; it can be
        // re-added later if needed
        await queryRunner.query(`
            drop index \`idx_workspace_organizationId\` on \`workspace\`;
        `)

        /*-------------------------------------
        ----------- workspace_user ------------
        --------------------------------------*/
        // rename workspace_users table to temp_workspace_user
        await queryRunner.query(`alter table \`workspace_users\` rename to \`temp_workspace_user\`;`)

        // create workspace_user table
        await queryRunner.query(`
            create table \`workspace_user\` (
                \`workspaceId\` varchar(36) not null,
                \`userId\` varchar(36) not null,
                \`roleId\` varchar(36) not null,
                \`status\` varchar(20) default '${WorkspaceUserStatus.INVITED}' not null,
                \`lastLogin\` timestamp null,
                \`createdDate\` timestamp default now() not null,
                \`updatedDate\` timestamp default now() not null,
                \`createdBy\` varchar(36) not null,
                \`updatedBy\` varchar(36) not null,
                constraint \`pk_workspace_user\` primary key (\`workspaceId\`, \`userId\`),
                constraint \`fk_workspace_user_workspaceId\` foreign key (\`workspaceId\`) references \`workspace\` (\`id\`),
                constraint \`fk_workspace_user_userId\` foreign key (\`userId\`) references \`user\` (\`id\`),
                constraint \`fk_workspace_user_roleId\` foreign key (\`roleId\`) references \`role\` (\`id\`),
                constraint \`fk_workspace_user_createdBy\` foreign key (\`createdBy\`) references \`user\` (\`id\`),
                constraint \`fk_workspace_user_updatedBy\` foreign key (\`updatedBy\`) references \`user\` (\`id\`)
            ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
        `)
    }

    // Removes workspaces that have no user attached, cascading by hand through
    // every table that references a workspace (or its chatflows/datasets/
    // document stores/evaluations).
    private async deleteWorkspaceWithoutUser(queryRunner: QueryRunner) {
        const workspaceWithoutUser = await queryRunner.query(`
            select w.\`id\` as \`id\` from \`workspace_user\` as \`wu\`
            right join \`workspace\` as \`w\` on \`wu\`.\`workspaceId\` = \`w\`.\`id\`
            where \`wu\`.\`userId\` is null;
        `)
        const workspaceIds = workspaceWithoutUser.map((workspace: { id: string }) => `'${workspace.id}'`).join(',')

        // Delete related records from other tables that reference the deleted workspaces
        if (workspaceIds && workspaceIds.length > 0) {
            await queryRunner.query(`
                delete from \`workspace_user\` where \`workspaceId\` in (${workspaceIds});
            `)
            await queryRunner.query(`
                delete from \`apikey\` where \`workspaceId\` in (${workspaceIds});
            `)
            await queryRunner.query(`
                delete from \`assistant\` where \`workspaceId\` in (${workspaceIds});
            `)
            const chatflows = await queryRunner.query(`
                select id from \`chat_flow\` where \`workspaceId\` in (${workspaceIds});
            `)
            const chatflowIds = chatflows.map((chatflow: { id: string }) => `'${chatflow.id}'`).join(',')
            if (chatflowIds && chatflowIds.length > 0) {
                await queryRunner.query(`
                    delete from \`chat_flow\` where \`workspaceId\` in (${workspaceIds});
                `)
                await queryRunner.query(`
                    delete from \`upsert_history\` where \`chatflowid\` in (${chatflowIds});
                `)
                await queryRunner.query(`
                    delete from \`chat_message\` where \`chatflowid\` in (${chatflowIds});
                `)
                await queryRunner.query(`
                    delete from \`chat_message_feedback\` where \`chatflowid\` in (${chatflowIds});
                `)
            }
            await queryRunner.query(`
                delete from \`credential\` where \`workspaceId\` in (${workspaceIds});
            `)
            await queryRunner.query(`
                delete from \`custom_template\` where \`workspaceId\` in (${workspaceIds});
            `)
            const datasets = await queryRunner.query(`
                select id from \`dataset\` where \`workspaceId\` in (${workspaceIds});
            `)
            const datasetIds = datasets.map((dataset: { id: string }) => `'${dataset.id}'`).join(',')
            if (datasetIds && datasetIds.length > 0) {
                await queryRunner.query(`
                    delete from \`dataset\` where \`workspaceId\` in (${workspaceIds});
                `)
                await queryRunner.query(`
                    delete from \`dataset_row\` where \`datasetId\` in (${datasetIds});
                `)
            }
            const documentStores = await queryRunner.query(`
                select id from \`document_store\` where \`workspaceId\` in (${workspaceIds});
            `)
            const documentStoreIds = documentStores.map((documentStore: { id: string }) => `'${documentStore.id}'`).join(',')
            if (documentStoreIds && documentStoreIds.length > 0) {
                await queryRunner.query(`
                    delete from \`document_store\` where \`workspaceId\` in (${workspaceIds});
                `)
                await queryRunner.query(`
                    delete from \`document_store_file_chunk\` where \`storeId\` in (${documentStoreIds});
                `)
            }
            const evaluations = await queryRunner.query(`
                select id from \`evaluation\` where \`workspaceId\` in (${workspaceIds});
            `)
            const evaluationIds = evaluations.map((evaluation: { id: string }) => `'${evaluation.id}'`).join(',')
            if (evaluationIds && evaluationIds.length > 0) {
                await queryRunner.query(`
                    delete from \`evaluation\` where \`workspaceId\` in (${workspaceIds});
                `)
                await queryRunner.query(`
                    delete from \`evaluation_run\` where \`evaluationId\` in (${evaluationIds});
                `)
            }
            await queryRunner.query(`
                delete from \`evaluator\` where \`workspaceId\` in (${workspaceIds});
            `)
            await queryRunner.query(`
                delete from \`tool\` where \`workspaceId\` in (${workspaceIds});
            `)
            await queryRunner.query(`
                delete from \`variable\` where \`workspaceId\` in (${workspaceIds});
            `)
            await queryRunner.query(`
                delete from \`workspace_shared\` where \`workspaceId\` in (${workspaceIds});
            `)
            await queryRunner.query(`
                delete from \`workspace\` where \`id\` in (${workspaceIds});
            `)
        }
    }

    // Seeds the general roles and copies the legacy temp_* data into the new
    // tables. No-op when there are no existing users.
    private async populateTable(queryRunner: QueryRunner): Promise<void> {
        // insert generalRole
        const generalRole = [
            {
                name: 'owner',
                description: 'Has full control over the organization.',
                permissions: '["organization","workspace"]'
            },
            {
                name: 'member',
                description: 'Has limited control over the organization.',
                permissions: '[]'
            },
            {
                name: 'personal workspace',
                description: 'Has full control over the personal workspace',
                permissions:
                    '[ "chatflows:view", "chatflows:create", "chatflows:update", "chatflows:duplicate", "chatflows:delete", "chatflows:export", "chatflows:import", "chatflows:config", "chatflows:domains", "agentflows:view", "agentflows:create", "agentflows:update", "agentflows:duplicate", "agentflows:delete", "agentflows:export", "agentflows:import", "agentflows:config", "agentflows:domains", "tools:view", "tools:create", "tools:update", "tools:delete", "tools:export", "assistants:view", "assistants:create", "assistants:update", "assistants:delete", "credentials:view", "credentials:create", "credentials:update", "credentials:delete", "credentials:share", "variables:view", "variables:create", "variables:update", "variables:delete", "apikeys:view", "apikeys:create", "apikeys:update", "apikeys:delete", "apikeys:import", "documentStores:view", "documentStores:create", "documentStores:update", "documentStores:delete", "documentStores:add-loader", "documentStores:delete-loader", "documentStores:preview-process", "documentStores:upsert-config", "datasets:view", "datasets:create", "datasets:update", "datasets:delete", "evaluators:view", "evaluators:create", "evaluators:update", "evaluators:delete", "evaluations:view", "evaluations:create", "evaluations:update", "evaluations:delete", "evaluations:run", "templates:marketplace", "templates:custom", "templates:custom-delete", "templates:toolexport", "templates:flowexport", "templates:custom-share", "workspace:export", "workspace:import", "executions:view", "executions:delete" ]'
            }
        ]
        for (let role of generalRole) {
            await queryRunner.query(`
                insert into \`role\`(\`name\`, \`description\`, \`permissions\`)
                values('${role.name}', '${role.description}', '${role.permissions}');
            `)
        }

        const users = await queryRunner.query('select * from `temp_user`;')
        // no legacy users means a fresh install: nothing to migrate
        const noExistingData = users.length > 0 === false
        if (noExistingData) return

        // assumes a single legacy organization row — TODO confirm
        const organizations = await queryRunner.query('select * from `temp_organization`;')
        const organizationId = organizations[0].id
        const adminUserId = organizations[0].adminUserId
        const ssoConfig = organizations[0].sso_config ? JSON.parse(await decrypt(organizations[0].sso_config)).providers : []

        /*-------------------------------------
        --------------- user -----------------
        --------------------------------------*/
        // insert admin user first (createdBy/updatedBy are self-referencing FKs)
        await queryRunner.query(`
            insert into \`user\` (\`id\`, \`name\`, \`email\`, \`credential\`, \`tempToken\`, \`tokenExpiry\`, \`status\`, \`createdBy\`, \`updatedBy\`)
            select tu.\`id\`, coalesce(tu.\`name\`, tu.\`email\`), tu.\`email\`, tu.\`credential\`, tu.\`tempToken\`, tu.\`tokenExpiry\`, tu.\`status\`,
            '${adminUserId}', '${adminUserId}'
            from \`temp_user\` as \`tu\` where tu.\`id\` = '${adminUserId}';
        `)

        // insert user with temp_user data
        await queryRunner.query(`
            insert into \`user\` (\`id\`, \`name\`, \`email\`, \`credential\`, \`tempToken\`, \`tokenExpiry\`, \`status\`, \`createdBy\`, \`updatedBy\`)
            select tu.\`id\`, coalesce(tu.\`name\`, tu.\`email\`), tu.\`email\`, tu.\`credential\`, tu.\`tempToken\`, tu.\`tokenExpiry\`, tu.\`status\`,
            '${adminUserId}', '${adminUserId}'
            from \`temp_user\` as \`tu\` where tu.\`id\` != '${adminUserId}';
        `)

        /*-------------------------------------
        ----------- organization --------------
        --------------------------------------*/
        // insert organization with temp_organization data
        await queryRunner.query(`
            insert into \`organization\` (\`id\`, \`name\`, \`createdBy\`, \`updatedBy\`)
            select \`id\`, \`name\`, \`adminUserId\`, \`adminUserId\` from \`temp_organization\`;
        `)

        /*-------------------------------------
        ----------- login method --------------
        --------------------------------------*/
        // insert login_method with temp_organization data
        for (let config of ssoConfig) {
            // normalize empty strings to undefined so fully-empty providers can be skipped
            const newConfigFormat = {
                domain: config.domain === '' || config.domain === undefined ? undefined : config.domain,
                tenantID: config.tenantID === '' || config.tenantID === undefined ? undefined : config.tenantID,
                clientID: config.clientID === '' || config.clientID === undefined ? undefined : config.clientID,
                clientSecret: config.clientSecret === '' || config.clientSecret === undefined ? undefined : config.clientSecret
            }
            const status = config.configEnabled === true ? LoginMethodStatus.ENABLE : LoginMethodStatus.DISABLE

            const allUndefined = Object.values(newConfigFormat).every((value) => value === undefined)
            if (allUndefined && status === LoginMethodStatus.DISABLE) continue
            const encryptData = await encrypt(JSON.stringify(newConfigFormat))

            await queryRunner.query(`
                insert into \`login_method\` (\`organizationId\`, \`name\`, \`config\`, \`status\`, \`createdBy\`, \`updatedBy\`)
                values('${organizationId}','${config.providerName}','${encryptData}','${status}','${adminUserId}','${adminUserId}');
            `)
        }

        /*-------------------------------------
        --------------- role ------------------
        --------------------------------------*/
        // insert workspace role into role (legacy permissions were a CSV string)
        const workspaceRole = await queryRunner.query(`select \`id\`, \`name\`, \`description\`, \`permissions\` from \`temp_role\`;`)
        for (let role of workspaceRole) {
            role.permissions = JSON.stringify(role.permissions.split(',').filter((permission: string) => permission.trim() !== ''))
            const haveDescriptionQuery = `insert into \`role\` (\`id\`, \`organizationId\`, \`name\`, \`description\`, \`permissions\`, \`createdBy\`, \`updatedBy\`)
                values('${role.id}','${organizationId}','${role.name}','${role.description}','${role.permissions}','${adminUserId}','${adminUserId}');`
            const noHaveDescriptionQuery = `insert into \`role\` (\`id\`, \`organizationId\`, \`name\`, \`permissions\`, \`createdBy\`, \`updatedBy\`)
                values('${role.id}','${organizationId}','${role.name}','${role.permissions}','${adminUserId}','${adminUserId}');`
            const insertRoleQuery = role.description ? haveDescriptionQuery : noHaveDescriptionQuery
            await queryRunner.query(insertRoleQuery)
        }

        /*-------------------------------------
        ---------- organization_user ----------
        --------------------------------------*/
        const roles = await queryRunner.query('select * from `role`;')
        // insert organization_user with user, role and temp_organization data
        for (let user of users) {
            const roleId =
                user.id === adminUserId
                    ? roles.find((role: any) => role.name === GeneralRole.OWNER).id
                    : roles.find((role: any) => role.name === GeneralRole.MEMBER).id
            await queryRunner.query(`
                insert into \`organization_user\` (\`organizationId\`, \`userId\`, \`roleId\`, \`status\`, \`createdBy\`, \`updatedBy\`)
                values ('${organizationId}','${user.id}','${roleId}','${user.status}','${adminUserId}','${adminUserId}');
            `)
        }

        /*-------------------------------------
        ------------- workspace ---------------
        --------------------------------------*/
        const workspaces = await queryRunner.query('select * from `workspace`;')
        for (let workspace of workspaces) {
            await queryRunner.query(
                `update \`workspace\` set \`createdBy\` = '${adminUserId}', \`updatedBy\` = '${adminUserId}' where \`id\` = '${workspace.id}';`
            )
        }

        /*-------------------------------------
        ----------- workspace_user ------------
        --------------------------------------*/
        const workspaceUsers = await queryRunner.query('select * from `temp_workspace_user`;')
        for (let workspaceUser of workspaceUsers) {
            // map the legacy role string onto the new role table's id
            switch (workspaceUser.role) {
                case 'org_admin':
                    workspaceUser.role = roles.find((role: any) => role.name === GeneralRole.OWNER).id
                    break
                case 'pw':
                    workspaceUser.role = roles.find((role: any) => role.name === GeneralRole.PERSONAL_WORKSPACE).id
                    break
                default:
                    workspaceUser.role = roles.find((role: any) => role.name === workspaceUser.role).id
                    break
            }
            const user = users.find((user: any) => user.id === workspaceUser.userId)
            const workspace = workspaces.find((workspace: any) => workspace.id === workspaceUser.workspaceId)
            if (workspaceUser.workspaceId === user.activeWorkspaceId && user.lastLogin) {
                const lastLogin = new Date(user.lastLogin).toISOString().replace('T', ' ').slice(0, 19)
                await queryRunner.query(`
                    insert into \`workspace_user\` (\`workspaceId\`, \`userId\`, \`roleId\`, \`status\`, \`lastLogin\`,\`createdBy\`, \`updatedBy\`)
                    values ('${workspaceUser.workspaceId}','${workspaceUser.userId}','${workspaceUser.role}','${WorkspaceUserStatus.ACTIVE}','${lastLogin}','${adminUserId}','${adminUserId}');
                `)
            } else if (workspace.name === WorkspaceName.DEFAULT_PERSONAL_WORKSPACE && !user.lastLogin) {
                // Skip personal workspaces for users who haven't signed up yet to avoid duplicates when they sign up.
                // account.service.ts creates personal workspace during sign-up.
                await queryRunner.query(`
                    delete from \`temp_workspace_user\` where \`workspaceId\` = '${workspaceUser.workspaceId}' and \`userId\` = '${workspaceUser.userId}';
                `)
                await queryRunner.query(`
                    delete from \`workspace\` where \`id\` = '${workspaceUser.workspaceId}';
                `)
            } else {
                await queryRunner.query(`
                    insert into \`workspace_user\` (\`workspaceId\`, \`userId\`, \`roleId\`, \`status\`,\`createdBy\`, \`updatedBy\`)
                    values ('${workspaceUser.workspaceId}','${workspaceUser.userId}','${workspaceUser.role}','${WorkspaceUserStatus.INVITED}','${adminUserId}','${adminUserId}');
                `)
            }
        }

        await this.deleteWorkspaceWithoutUser(queryRunner)
    }

    // Drops the renamed legacy tables once their data has been migrated.
    private async deleteTempTable(queryRunner: QueryRunner): Promise<void> {
        await queryRunner.query(`
            drop table \`temp_workspace_user\`;
        `)
        await queryRunner.query(`
            drop table \`temp_role\`;
        `)
        await queryRunner.query(`
            drop table \`temp_organization\`;
        `)
        await queryRunner.query(`
            drop table \`temp_user\`;
        `)
    }

    public async up(queryRunner: QueryRunner): Promise<void> {
        await this.modifyTable(queryRunner)
        await this.populateTable(queryRunner)
        await this.deleteTempTable(queryRunner)

        // This query cannot be part of the modifyTable function because:
        // 1. The `organizationId` in the `workspace` table might be referencing data in the `temp_organization` table, so it must be altered last.
        // 2. Setting `createdBy` and `updatedBy` to NOT NULL needs to happen after ensuring there's no existing data that would violate the constraint,
        //    because altering these columns while there is data could prevent new records from being inserted into the `workspace` table.
        // NOTE(review): constraint name `fk_organizationId` breaks the
        // fk_<table>_<column> convention used elsewhere — confirm intentional.
        await queryRunner.query(`
            alter table \`workspace\`
            modify column \`createdBy\` varchar(36) not null,
            modify column \`updatedBy\` varchar(36) not null,
            add constraint \`fk_organizationId\` foreign key (\`organizationId\`) references \`organization\` (\`id\`),
            add constraint \`fk_workspace_createdBy\` foreign key (\`createdBy\`) references \`user\` (\`id\`),
            add constraint \`fk_workspace_updatedBy\` foreign key (\`updatedBy\`) references \`user\` (\`id\`);
        `)

        // modify evaluation table for average_metrics column to be nullable
        await queryRunner.query(`
            alter table \`evaluation\`
            modify column \`average_metrics\` longtext null;
        `)
    }

    public async down(): Promise<void> {}
}

// ---- file: packages/server/src/enterprise/database/migrations/mysql/1746862866554-ExecutionLinkWorkspaceId.ts ----
import { MigrationInterface, QueryRunner } from 'typeorm'
import { ensureColumnExists } from './mysqlCustomFunctions'

/**
 * Adds a workspaceId column to `execution` and links it to `workspace`
 * via an index and a foreign key (MySQL).
 */
export class ExecutionLinkWorkspaceId1746862866554 implements MigrationInterface {
    public async up(queryRunner: QueryRunner): Promise<void> {
        // step 1 - add workspaceId column
        await ensureColumnExists(queryRunner, 'execution',
'workspaceId', 'varchar(36)') + + // step 2 - add index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`execution\` + ADD INDEX \`idx_execution_workspaceId\` (\`workspaceId\`), + ADD CONSTRAINT \`fk_execution_workspaceId\` + FOREIGN KEY (\`workspaceId\`) + REFERENCES \`workspace\`(\`id\`); + `) + } + + public async down(queryRunner: QueryRunner): Promise { + // step 1 - drop index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`execution\` + DROP INDEX \`idx_execution_workspaceId\`, + DROP FOREIGN KEY \`fk_execution_workspaceId\`; + `) + + // step 2 - drop workspaceId column + await queryRunner.query(`ALTER TABLE \`execution\` DROP COLUMN \`workspaceId\`;`) + } +} diff --git a/packages/server/src/enterprise/database/migrations/mysql/mysqlCustomFunctions.ts b/packages/server/src/enterprise/database/migrations/mysql/mysqlCustomFunctions.ts new file mode 100644 index 000000000..05bc715bd --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/mysql/mysqlCustomFunctions.ts @@ -0,0 +1,26 @@ +import { QueryRunner } from 'typeorm' + +export const ensureColumnExists = async ( + queryRunner: QueryRunner, + tableName: string, + columnName: string, + columnType: string // Accept column type as a parameter +): Promise => { + // Check if the specified column exists in the given table + const columnCheck = await queryRunner.query( + ` + SELECT COLUMN_NAME + FROM information_schema.COLUMNS + WHERE TABLE_NAME = ? AND COLUMN_NAME = ? AND TABLE_SCHEMA = ? 
+ `, + [tableName, columnName, queryRunner.connection.options.database] + ) + + // Check if the column exists + const columnExists = columnCheck.length > 0 + + if (!columnExists) { + // Add the column if it does not exist + await queryRunner.query(`ALTER TABLE ${tableName} ADD COLUMN ${columnName} ${columnType};`) + } +} diff --git a/packages/server/src/enterprise/database/migrations/postgres/1720230151482-AddAuthTables.ts b/packages/server/src/enterprise/database/migrations/postgres/1720230151482-AddAuthTables.ts new file mode 100644 index 000000000..071b97efe --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/postgres/1720230151482-AddAuthTables.ts @@ -0,0 +1,44 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddAuthTables1720230151482 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS "user" ( + id uuid NOT NULL DEFAULT uuid_generate_v4(), + "name" varchar, + "role" varchar NOT NULL, + "credential" text, + "tempToken" text, + "tokenExpiry" timestamp, + "email" varchar NOT NULL, + "status" varchar NOT NULL, + "activeWorkspaceId" varchar, + "lastLogin" timestamp, + CONSTRAINT "PK_98455643dd334f54-9830ab78f9" PRIMARY KEY (id) + );` + ) + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS "roles" ( + id uuid NOT NULL DEFAULT uuid_generate_v4(), + "name" varchar, + "description" varchar, + "permissions" text, + CONSTRAINT "PK_98488643dd3554f54-9830ab78f9" PRIMARY KEY (id) + );` + ) + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS "login_activity" ( + "id" uuid NOT NULL DEFAULT uuid_generate_v4(), + "username" varchar NOT NULL, + "activity_code" integer NOT NULL, + "message" varchar NOT NULL, + "attemptedDateTime" timestamp NOT NULL DEFAULT now());` + ) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`DROP TABLE user`) + await queryRunner.query(`DROP TABLE roles`) + await 
queryRunner.query(`DROP TABLE login_history`) + } +} diff --git a/packages/server/src/enterprise/database/migrations/postgres/1720230151484-AddWorkspace.ts b/packages/server/src/enterprise/database/migrations/postgres/1720230151484-AddWorkspace.ts new file mode 100644 index 000000000..1bd4dac32 --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/postgres/1720230151484-AddWorkspace.ts @@ -0,0 +1,52 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddWorkspace1720230151484 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS workspace ( + id uuid NOT NULL DEFAULT uuid_generate_v4(), + "name" varchar NOT NULL, + "description" varchar NULL, + "createdDate" timestamp NOT NULL DEFAULT now(), + "updatedDate" timestamp NOT NULL DEFAULT now(), + CONSTRAINT "PK_98719043dd804f55-9830ab99f8" PRIMARY KEY (id) + );` + ) + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS workspace_users ( + id uuid NOT NULL DEFAULT uuid_generate_v4(), + "workspaceId" varchar NOT NULL, + "userId" varchar NOT NULL, + "role" varchar NULL, + CONSTRAINT "PK_98718943dd804f55-9830ab99f8" PRIMARY KEY (id) + );` + ) + + await queryRunner.query(`ALTER TABLE "chat_flow" ADD COLUMN IF NOT EXISTS "workspaceId" varchar;`) + await queryRunner.query(`ALTER TABLE "tool" ADD COLUMN IF NOT EXISTS "workspaceId" varchar;`) + await queryRunner.query(`ALTER TABLE "assistant" ADD COLUMN IF NOT EXISTS "workspaceId" varchar;`) + await queryRunner.query(`ALTER TABLE "credential" ADD COLUMN IF NOT EXISTS "workspaceId" varchar;`) + await queryRunner.query(`ALTER TABLE "document_store" ADD COLUMN IF NOT EXISTS "workspaceId" varchar;`) + await queryRunner.query(`ALTER TABLE "evaluation" ADD COLUMN IF NOT EXISTS "workspaceId" varchar;`) + await queryRunner.query(`ALTER TABLE "evaluator" ADD COLUMN IF NOT EXISTS "workspaceId" varchar;`) + await queryRunner.query(`ALTER TABLE "dataset" 
ADD COLUMN IF NOT EXISTS "workspaceId" varchar;`) + await queryRunner.query(`ALTER TABLE "apikey" ADD COLUMN IF NOT EXISTS "workspaceId" varchar;`) + await queryRunner.query(`ALTER TABLE "variable" ADD COLUMN IF NOT EXISTS "workspaceId" varchar;`) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`DROP TABLE workspace`) + await queryRunner.query(`DROP TABLE workspace_users`) + + await queryRunner.query(`ALTER TABLE "chat_flow" DROP COLUMN "workspaceId";`) + await queryRunner.query(`ALTER TABLE "tool" DROP COLUMN "workspaceId";`) + await queryRunner.query(`ALTER TABLE "assistant" DROP COLUMN "workspaceId";`) + await queryRunner.query(`ALTER TABLE "credential" DROP COLUMN "workspaceId";`) + await queryRunner.query(`ALTER TABLE "document_store" DROP COLUMN "workspaceId";`) + await queryRunner.query(`ALTER TABLE "evaluation" DROP COLUMN "workspaceId";`) + await queryRunner.query(`ALTER TABLE "evaluator" DROP COLUMN "workspaceId";`) + await queryRunner.query(`ALTER TABLE "dataset" DROP COLUMN "workspaceId";`) + await queryRunner.query(`ALTER TABLE "apikey" DROP COLUMN "workspaceId";`) + await queryRunner.query(`ALTER TABLE "variable" DROP COLUMN "workspaceId";`) + } +} diff --git a/packages/server/src/enterprise/database/migrations/postgres/1726654922034-AddWorkspaceShared.ts b/packages/server/src/enterprise/database/migrations/postgres/1726654922034-AddWorkspaceShared.ts new file mode 100644 index 000000000..b1c6b2ef0 --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/postgres/1726654922034-AddWorkspaceShared.ts @@ -0,0 +1,21 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddWorkspaceShared1726654922034 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS "workspace_shared" ( + id uuid NOT NULL DEFAULT uuid_generate_v4(), + "workspaceId" varchar NOT NULL, + "sharedItemId" varchar NOT NULL, + 
"itemType" varchar NOT NULL, + "createdDate" timestamp NOT NULL DEFAULT now(), + "updatedDate" timestamp NOT NULL DEFAULT now(), + CONSTRAINT "PK_90016043dd804f55-9830ab97f8" PRIMARY KEY (id) + );` + ) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`DROP TABLE workspace_shared`) + } +} diff --git a/packages/server/src/enterprise/database/migrations/postgres/1726655750383-AddWorkspaceIdToCustomTemplate.ts b/packages/server/src/enterprise/database/migrations/postgres/1726655750383-AddWorkspaceIdToCustomTemplate.ts new file mode 100644 index 000000000..f15338eca --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/postgres/1726655750383-AddWorkspaceIdToCustomTemplate.ts @@ -0,0 +1,11 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddWorkspaceIdToCustomTemplate1726655750383 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE "custom_template" ADD COLUMN IF NOT EXISTS "workspaceId" varchar;`) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE "custom_template" DROP COLUMN "workspaceId";`) + } +} diff --git a/packages/server/src/enterprise/database/migrations/postgres/1727798417345-AddOrganization.ts b/packages/server/src/enterprise/database/migrations/postgres/1727798417345-AddOrganization.ts new file mode 100644 index 000000000..571d23da9 --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/postgres/1727798417345-AddOrganization.ts @@ -0,0 +1,25 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddOrganization1727798417345 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS organization ( + id uuid NOT NULL DEFAULT uuid_generate_v4(), + "name" varchar NOT NULL, + "adminUserId" varchar NULL, + "defaultWsId" varchar NULL, + 
"organization_type" varchar NULL, + "createdDate" timestamp NOT NULL DEFAULT now(), + "updatedDate" timestamp NOT NULL DEFAULT now(), + CONSTRAINT "PK_99619041dd804f00-9830ab99f8" PRIMARY KEY (id) + );` + ) + await queryRunner.query(`ALTER TABLE "workspace" ADD COLUMN IF NOT EXISTS "organizationId" varchar;`) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`DROP TABLE organization`) + + await queryRunner.query(`ALTER TABLE "workspace" DROP COLUMN "organizationId";`) + } +} diff --git a/packages/server/src/enterprise/database/migrations/postgres/1729130948686-LinkWorkspaceId.ts b/packages/server/src/enterprise/database/migrations/postgres/1729130948686-LinkWorkspaceId.ts new file mode 100644 index 000000000..a31a04560 --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/postgres/1729130948686-LinkWorkspaceId.ts @@ -0,0 +1,429 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class LinkWorkspaceId1729130948686 implements MigrationInterface { + name = 'LinkWorkspaceId1729130948686' + + public async up(queryRunner: QueryRunner): Promise { + // step 1 - convert from varchar to UUID type + await queryRunner.query(` + ALTER TABLE "apikey" ALTER COLUMN "workspaceId" SET DATA TYPE UUID USING "workspaceId"::UUID; + `) + + // step 2 - add foreign key constraint + await queryRunner.query(` + ALTER TABLE "apikey" ADD CONSTRAINT "fk_apikey_workspaceId" FOREIGN KEY ("workspaceId") REFERENCES "workspace"("id"); + `) + + // step 3 - create index for workspaceId + await queryRunner.query(` + CREATE INDEX "idx_apikey_workspaceId" ON "apikey"("workspaceId"); + `) + + // step 1 - convert from varchar to UUID type + await queryRunner.query(` + ALTER TABLE "user" ALTER COLUMN "activeWorkspaceId" SET DATA TYPE UUID USING "activeWorkspaceId"::UUID; + `) + + // step 2 - add foreign key constraint + await queryRunner.query(` + ALTER TABLE "user" ADD CONSTRAINT "fk_user_activeWorkspaceId" FOREIGN KEY 
("activeWorkspaceId") REFERENCES "workspace"("id"); + `) + + // step 3 - create index for activeWorkspaceId + await queryRunner.query(` + CREATE INDEX "idx_user_activeWorkspaceId" ON "user"("activeWorkspaceId"); + `) + + // step 1 - convert from varchar to UUID type + await queryRunner.query(` + ALTER TABLE "workspace_users" ALTER COLUMN "workspaceId" SET DATA TYPE UUID USING "workspaceId"::UUID; + `) + + // step 2 - add foreign key constraint + await queryRunner.query(` + ALTER TABLE "workspace_users" ADD CONSTRAINT "fk_workspace_users_workspaceId" FOREIGN KEY ("workspaceId") REFERENCES "workspace"("id"); + `) + + // step 3 - create index for workspaceId + await queryRunner.query(` + CREATE INDEX "idx_workspace_users_workspaceId" ON "workspace_users"("workspaceId"); + `) + + // step 1 - convert from varchar to UUID type + await queryRunner.query(` + ALTER TABLE "chat_flow" ALTER COLUMN "workspaceId" SET DATA TYPE UUID USING "workspaceId"::UUID; + `) + + // step 2 - add foreign key constraint + await queryRunner.query(` + ALTER TABLE "chat_flow" ADD CONSTRAINT "fk_chat_flow_workspaceId" FOREIGN KEY ("workspaceId") REFERENCES "workspace"("id"); + `) + + // step 3 - create index for workspaceId + await queryRunner.query(` + CREATE INDEX "idx_chat_flow_workspaceId" ON "chat_flow"("workspaceId"); + `) + + // step 1 - convert from varchar to UUID type + await queryRunner.query(` + ALTER TABLE "tool" ALTER COLUMN "workspaceId" SET DATA TYPE UUID USING "workspaceId"::UUID; + `) + + // step 2 - add foreign key constraint + await queryRunner.query(` + ALTER TABLE "tool" ADD CONSTRAINT "fk_tool_workspaceId" FOREIGN KEY ("workspaceId") REFERENCES "workspace"("id"); + `) + + // step 3 - create index for workspaceId + await queryRunner.query(` + CREATE INDEX "idx_tool_workspaceId" ON "tool"("workspaceId"); + `) + + // step 1 - convert from varchar to UUID type + await queryRunner.query(` + ALTER TABLE "assistant" ALTER COLUMN "workspaceId" SET DATA TYPE UUID USING 
"workspaceId"::UUID; + `) + + // step 2 - add foreign key constraint + await queryRunner.query(` + ALTER TABLE "assistant" ADD CONSTRAINT "fk_assistant_workspaceId" FOREIGN KEY ("workspaceId") REFERENCES "workspace"("id"); + `) + + // step 3 - create index for workspaceId + await queryRunner.query(` + CREATE INDEX "idx_assistant_workspaceId" ON "assistant"("workspaceId"); + `) + + // step 1 - convert from varchar to UUID type + await queryRunner.query(` + ALTER TABLE "credential" ALTER COLUMN "workspaceId" SET DATA TYPE UUID USING "workspaceId"::UUID; + `) + + // step 2 - add foreign key constraint + await queryRunner.query(` + ALTER TABLE "credential" ADD CONSTRAINT "fk_credential_workspaceId" FOREIGN KEY ("workspaceId") REFERENCES "workspace"("id"); + `) + + // step 3 - create index for workspaceId + await queryRunner.query(` + CREATE INDEX "idx_credential_workspaceId" ON "credential"("workspaceId"); + `) + + // step 1 - convert from varchar to UUID type + await queryRunner.query(` + ALTER TABLE "document_store" ALTER COLUMN "workspaceId" SET DATA TYPE UUID USING "workspaceId"::UUID; + `) + + // step 2 - add foreign key constraint + await queryRunner.query(` + ALTER TABLE "document_store" ADD CONSTRAINT "fk_document_store_workspaceId" FOREIGN KEY ("workspaceId") REFERENCES "workspace"("id"); + `) + + // step 3 - create index for workspaceId + await queryRunner.query(` + CREATE INDEX "idx_document_store_workspaceId" ON "document_store"("workspaceId"); + `) + + // step 1 - convert from varchar to UUID type + await queryRunner.query(` + ALTER TABLE "evaluation" ALTER COLUMN "workspaceId" SET DATA TYPE UUID USING "workspaceId"::UUID; + `) + + // step 2 - add foreign key constraint + await queryRunner.query(` + ALTER TABLE "evaluation" ADD CONSTRAINT "fk_evaluation_workspaceId" FOREIGN KEY ("workspaceId") REFERENCES "workspace"("id"); + `) + + // step 3 - create index for workspaceId + await queryRunner.query(` + CREATE INDEX "idx_evaluation_workspaceId" ON 
"evaluation"("workspaceId"); + `) + + // step 1 - convert from varchar to UUID type + await queryRunner.query(` + ALTER TABLE "evaluator" ALTER COLUMN "workspaceId" SET DATA TYPE UUID USING "workspaceId"::UUID; + `) + + // step 2 - add foreign key constraint + await queryRunner.query(` + ALTER TABLE "evaluator" ADD CONSTRAINT "fk_evaluator_workspaceId" FOREIGN KEY ("workspaceId") REFERENCES "workspace"("id"); + `) + + // step 3 - create index for workspaceId + await queryRunner.query(` + CREATE INDEX "idx_evaluator_workspaceId" ON "evaluator"("workspaceId"); + `) + + // step 1 - convert from varchar to UUID type + await queryRunner.query(` + ALTER TABLE "dataset" ALTER COLUMN "workspaceId" SET DATA TYPE UUID USING "workspaceId"::UUID; + `) + + // step 2 - add foreign key constraint + await queryRunner.query(` + ALTER TABLE "dataset" ADD CONSTRAINT "fk_dataset_workspaceId" FOREIGN KEY ("workspaceId") REFERENCES "workspace"("id"); + `) + + // step 3 - create index for workspaceId + await queryRunner.query(` + CREATE INDEX "idx_dataset_workspaceId" ON "dataset"("workspaceId"); + `) + + // step 1 - convert from varchar to UUID type + await queryRunner.query(` + ALTER TABLE "variable" ALTER COLUMN "workspaceId" SET DATA TYPE UUID USING "workspaceId"::UUID; + `) + + // step 2 - add foreign key constraint + await queryRunner.query(` + ALTER TABLE "variable" ADD CONSTRAINT "fk_variable_workspaceId" FOREIGN KEY ("workspaceId") REFERENCES "workspace"("id"); + `) + + // step 3 - create index for workspaceId + await queryRunner.query(` + CREATE INDEX "idx_variable_workspaceId" ON "variable"("workspaceId"); + `) + + // step 1 - convert from varchar to UUID type + await queryRunner.query(` + ALTER TABLE "workspace_shared" ALTER COLUMN "workspaceId" SET DATA TYPE UUID USING "workspaceId"::UUID; + `) + + // step 2 - add foreign key constraint + await queryRunner.query(` + ALTER TABLE "workspace_shared" ADD CONSTRAINT "fk_workspace_shared_workspaceId" FOREIGN KEY ("workspaceId") 
REFERENCES "workspace"("id"); + `) + + // step 3 - create index for workspaceId + await queryRunner.query(` + CREATE INDEX "idx_workspace_shared_workspaceId" ON "workspace_shared"("workspaceId"); + `) + + // step 1 - convert from varchar to UUID type + await queryRunner.query(` + ALTER TABLE "custom_template" ALTER COLUMN "workspaceId" SET DATA TYPE UUID USING "workspaceId"::UUID; + `) + + // step 2 - add foreign key constraint + await queryRunner.query(` + ALTER TABLE "custom_template" ADD CONSTRAINT "fk_custom_template_workspaceId" FOREIGN KEY ("workspaceId") REFERENCES "workspace"("id"); + `) + + // step 3 - create index for workspaceId + await queryRunner.query(` + CREATE INDEX "idx_custom_template_workspaceId" ON "custom_template"("workspaceId"); + `) + } + + public async down(queryRunner: QueryRunner): Promise { + // step 1 - drop index + await queryRunner.query(` + DROP INDEX "idx_apikey_workspaceId"; + `) + + // step 2 - drop foreign key constraint + await queryRunner.query(` + ALTER TABLE "apikey" DROP CONSTRAINT "fk_apikey_workspaceId"; + `) + + // Step 3 - convert from UUID to varchar type + await queryRunner.query(` + ALTER TABLE "apikey" ALTER COLUMN "workspaceId" SET DATA TYPE varchar USING "workspaceId"::varchar; + `) + + // step 1 - drop index + await queryRunner.query(` + DROP INDEX "idx_user_activeWorkspaceId"; + `) + + // step 2 - drop foreign key constraint + await queryRunner.query(` + ALTER TABLE "user" DROP CONSTRAINT "fk_user_activeWorkspaceId"; + `) + + // Step 3 - convert from UUID to varchar type + await queryRunner.query(` + ALTER TABLE "user" ALTER COLUMN "activeWorkspaceId" SET DATA TYPE varchar USING "activeWorkspaceId"::varchar; + `) + + // step 1 - drop index + await queryRunner.query(` + DROP INDEX "idx_workspace_users_workspaceId"; + `) + + // step 2 - drop foreign key constraint + await queryRunner.query(` + ALTER TABLE "workspace_users" DROP CONSTRAINT "fk_workspace_users_workspaceId"; + `) + + // Step 3 - convert from UUID to 
varchar type + await queryRunner.query(` + ALTER TABLE "workspace_users" ALTER COLUMN "workspaceId" SET DATA TYPE varchar USING "workspaceId"::varchar; + `) + + // step 1 - drop index + await queryRunner.query(` + DROP INDEX "idx_chat_flow_workspaceId"; + `) + + // step 2 - drop foreign key constraint + await queryRunner.query(` + ALTER TABLE "chat_flow" DROP CONSTRAINT "fk_chat_flow_workspaceId"; + `) + + // Step 3 - convert from UUID to varchar type + await queryRunner.query(` + ALTER TABLE "chat_flow" ALTER COLUMN "workspaceId" SET DATA TYPE varchar USING "workspaceId"::varchar; + `) + + // step 1 - drop index + await queryRunner.query(` + DROP INDEX "idx_tool_workspaceId"; + `) + + // step 2 - drop foreign key constraint + await queryRunner.query(` + ALTER TABLE "tool" DROP CONSTRAINT "fk_tool_workspaceId"; + `) + + // Step 3 - convert from UUID to varchar type + await queryRunner.query(` + ALTER TABLE "tool" ALTER COLUMN "workspaceId" SET DATA TYPE varchar USING "workspaceId"::varchar; + `) + + // step 1 - drop index + await queryRunner.query(` + DROP INDEX "idx_assistant_workspaceId"; + `) + + // step 2 - drop foreign key constraint + await queryRunner.query(` + ALTER TABLE "assistant" DROP CONSTRAINT "fk_assistant_workspaceId"; + `) + + // Step 3 - convert from UUID to varchar type + await queryRunner.query(` + ALTER TABLE "assistant" ALTER COLUMN "workspaceId" SET DATA TYPE varchar USING "workspaceId"::varchar; + `) + + // step 1 - drop index + await queryRunner.query(` + DROP INDEX "idx_credential_workspaceId"; + `) + + // step 2 - drop foreign key constraint + await queryRunner.query(` + ALTER TABLE "credential" DROP CONSTRAINT "fk_credential_workspaceId"; + `) + + // Step 3 - convert from UUID to varchar type + await queryRunner.query(` + ALTER TABLE "credential" ALTER COLUMN "workspaceId" SET DATA TYPE varchar USING "workspaceId"::varchar; + `) + + // step 1 - drop index + await queryRunner.query(` + DROP INDEX "idx_document_store_workspaceId"; + `) + + 
// step 2 - drop foreign key constraint + await queryRunner.query(` + ALTER TABLE "document_store" DROP CONSTRAINT "fk_document_store_workspaceId"; + `) + + // Step 3 - convert from UUID to varchar type + await queryRunner.query(` + ALTER TABLE "document_store" ALTER COLUMN "workspaceId" SET DATA TYPE varchar USING "workspaceId"::varchar; + `) + + // step 1 - drop index + await queryRunner.query(` + DROP INDEX "idx_evaluation_workspaceId"; + `) + + // step 2 - drop foreign key constraint + await queryRunner.query(` + ALTER TABLE "evaluation" DROP CONSTRAINT "fk_evaluation_workspaceId"; + `) + + // Step 3 - convert from UUID to varchar type + await queryRunner.query(` + ALTER TABLE "evaluation" ALTER COLUMN "workspaceId" SET DATA TYPE varchar USING "workspaceId"::varchar; + `) + + // step 1 - drop index + await queryRunner.query(` + DROP INDEX "idx_evaluator_workspaceId"; + `) + + // step 2 - drop foreign key constraint + await queryRunner.query(` + ALTER TABLE "evaluator" DROP CONSTRAINT "fk_evaluator_workspaceId"; + `) + + // Step 3 - convert from UUID to varchar type + await queryRunner.query(` + ALTER TABLE "evaluator" ALTER COLUMN "workspaceId" SET DATA TYPE varchar USING "workspaceId"::varchar; + `) + + // step 1 - drop index + await queryRunner.query(` + DROP INDEX "idx_dataset_workspaceId"; + `) + + // step 2 - drop foreign key constraint + await queryRunner.query(` + ALTER TABLE "dataset" DROP CONSTRAINT "fk_dataset_workspaceId"; + `) + + // Step 3 - convert from UUID to varchar type + await queryRunner.query(` + ALTER TABLE "dataset" ALTER COLUMN "workspaceId" SET DATA TYPE varchar USING "workspaceId"::varchar; + `) + + // step 1 - drop index + await queryRunner.query(` + DROP INDEX "idx_variable_workspaceId"; + `) + + // step 2 - drop foreign key constraint + await queryRunner.query(` + ALTER TABLE "variable" DROP CONSTRAINT "fk_variable_workspaceId"; + `) + + // Step 3 - convert from UUID to varchar type + await queryRunner.query(` + ALTER TABLE 
"variable" ALTER COLUMN "workspaceId" SET DATA TYPE varchar USING "workspaceId"::varchar; + `) + + // step 1 - drop index + await queryRunner.query(` + DROP INDEX "idx_workspace_shared_workspaceId"; + `) + + // step 2 - drop foreign key constraint + await queryRunner.query(` + ALTER TABLE "workspace_shared" DROP CONSTRAINT "fk_workspace_shared_workspaceId"; + `) + + // Step 3 - convert from UUID to varchar type + await queryRunner.query(` + ALTER TABLE "workspace_shared" ALTER COLUMN "workspaceId" SET DATA TYPE varchar USING "workspaceId"::varchar; + `) + + // step 1 - drop index + await queryRunner.query(` + DROP INDEX "idx_custom_template_workspaceId"; + `) + + // step 2 - drop foreign key constraint + await queryRunner.query(` + ALTER TABLE "custom_template" DROP CONSTRAINT "fk_custom_template_workspaceId"; + `) + + // Step 3 - convert from UUID to varchar type + await queryRunner.query(` + ALTER TABLE "custom_template" ALTER COLUMN "workspaceId" SET DATA TYPE varchar USING "workspaceId"::varchar; + `) + } +} diff --git a/packages/server/src/enterprise/database/migrations/postgres/1729133111652-LinkOrganizationId.ts b/packages/server/src/enterprise/database/migrations/postgres/1729133111652-LinkOrganizationId.ts new file mode 100644 index 000000000..d7b5aa78a --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/postgres/1729133111652-LinkOrganizationId.ts @@ -0,0 +1,39 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class LinkOrganizationId1729133111652 implements MigrationInterface { + name = 'LinkOrganizationId1729133111652' + + public async up(queryRunner: QueryRunner): Promise { + // step 1 - convert from varchar to UUID type + await queryRunner.query(` + ALTER TABLE "workspace" ALTER COLUMN "organizationId" SET DATA TYPE UUID USING "organizationId"::UUID; + `) + + // step 2 - add foreign key constraint + await queryRunner.query(` + ALTER TABLE "workspace" ADD CONSTRAINT "fk_workspace_organizationId" FOREIGN KEY 
("organizationId") REFERENCES "organization"("id"); + `) + + // step 3 - create index for organizationId + await queryRunner.query(` + CREATE INDEX "idx_workspace_organizationId" ON "workspace"("organizationId"); + `) + } + + public async down(queryRunner: QueryRunner): Promise { + // step 1 - drop index + await queryRunner.query(` + DROP INDEX "idx_workspace_organizationId"; + `) + + // step 2 - drop foreign key constraint + await queryRunner.query(` + ALTER TABLE "workspace" DROP CONSTRAINT "fk_workspace_organizationId"; + `) + + // Step 3 - convert from UUID to varchar type + await queryRunner.query(` + ALTER TABLE "workspace" ALTER COLUMN "organizationId" SET DATA TYPE varchar USING "organizationId"::varchar; + `) + } +} diff --git a/packages/server/src/enterprise/database/migrations/postgres/1730519457880-AddSSOColumns.ts b/packages/server/src/enterprise/database/migrations/postgres/1730519457880-AddSSOColumns.ts new file mode 100644 index 000000000..77295a572 --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/postgres/1730519457880-AddSSOColumns.ts @@ -0,0 +1,15 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddSSOColumns1730519457880 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE "organization" ADD COLUMN IF NOT EXISTS "sso_config" text;`) + await queryRunner.query(`ALTER TABLE "user" ADD COLUMN IF NOT EXISTS "user_type" varchar;`) + await queryRunner.query(`ALTER TABLE "login_activity" ADD COLUMN IF NOT EXISTS "login_mode" varchar;`) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE "organization" DROP COLUMN "sso_config";`) + await queryRunner.query(`ALTER TABLE "user" DROP COLUMN "user_type";`) + await queryRunner.query(`ALTER TABLE "login_activity" DROP COLUMN "login_mode";`) + } +} diff --git 
a/packages/server/src/enterprise/database/migrations/postgres/1734074497540-AddPersonalWorkspace.ts b/packages/server/src/enterprise/database/migrations/postgres/1734074497540-AddPersonalWorkspace.ts new file mode 100644 index 000000000..d5ecbbe3e --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/postgres/1734074497540-AddPersonalWorkspace.ts @@ -0,0 +1,27 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' +import { v4 as uuidv4 } from 'uuid' + +export class AddPersonalWorkspace1734074497540 implements MigrationInterface { + name = 'AddPersonalWorkspace1734074497540' + + public async up(queryRunner: QueryRunner): Promise { + const users = await queryRunner.query(`select * from "user";`) + const organization = await queryRunner.query(`select "id" from "organization";`) + for (let user of users) { + const workspaceDescription = 'Personal Workspace of ' + user.id + const workspaceId = uuidv4() + + await queryRunner.query(` + insert into "workspace" ("id", "name", "description", "organizationId") + values('${workspaceId}', 'Personal Workspace', '${workspaceDescription}', '${organization[0].id}'); + `) + + await queryRunner.query(` + insert into "workspace_users" ("workspaceId", "userId", "role") + values('${workspaceId}', '${user.id}', 'pw'); + `) + } + } + + public async down(): Promise {} +} diff --git a/packages/server/src/enterprise/database/migrations/postgres/1737076223692-RefactorEnterpriseDatabase.ts b/packages/server/src/enterprise/database/migrations/postgres/1737076223692-RefactorEnterpriseDatabase.ts new file mode 100644 index 000000000..e40749aca --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/postgres/1737076223692-RefactorEnterpriseDatabase.ts @@ -0,0 +1,472 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' +import { decrypt, encrypt } from '../../../utils/encryption.util' +import { LoginMethodStatus } from '../../entities/login-method.entity' +import { OrganizationUserStatus } from 
'../../entities/organization-user.entity' +import { OrganizationName } from '../../entities/organization.entity' +import { GeneralRole } from '../../entities/role.entity' +import { UserStatus } from '../../entities/user.entity' +import { WorkspaceUserStatus } from '../../entities/workspace-user.entity' +import { WorkspaceName } from '../../entities/workspace.entity' + +export class RefactorEnterpriseDatabase1737076223692 implements MigrationInterface { + name = 'RefactorEnterpriseDatabase1737076223692' + private async modifyTable(queryRunner: QueryRunner): Promise { + /*------------------------------------- + --------------- user ----------------- + --------------------------------------*/ + // rename user table to temp_user + await queryRunner.query(`alter table "user" rename to "temp_user";`) + + // create user table + await queryRunner.query(` + create table "user" ( + "id" uuid default uuid_generate_v4() primary key, + "name" varchar(100) not null, + "email" varchar(255) not null unique, + "credential" text null, + "tempToken" text null, + "tokenExpiry" timestamp null, + "status" varchar(20) default '${UserStatus.UNVERIFIED}' not null, + "createdDate" timestamp default now() not null, + "updatedDate" timestamp default now() not null, + "createdBy" uuid not null, + "updatedBy" uuid not null, + constraint "fk_createdBy" foreign key ("createdBy") references "user" ("id"), + constraint "fk_updatedBy" foreign key ("updatedBy") references "user" ("id") + ); + `) + + /*------------------------------------- + ----------- organization -------------- + --------------------------------------*/ + // rename organization table to temp_organization + await queryRunner.query(`alter table "organization" rename to "temp_organization";`) + + // create organization table + await queryRunner.query(` + create table "organization" ( + "id" uuid default uuid_generate_v4() primary key, + "name" varchar(100) default '${OrganizationName.DEFAULT_ORGANIZATION}' not null, + "customerId" 
varchar(100) null, + "subscriptionId" varchar(100) null, + "createdDate" timestamp default now() not null, + "updatedDate" timestamp default now() not null, + "createdBy" uuid not null, + "updatedBy" uuid not null, + constraint "fk_createdBy" foreign key ("createdBy") references "user" ("id"), + constraint "fk_updatedBy" foreign key ("updatedBy") references "user" ("id") + ); + `) + + /*------------------------------------- + ----------- login method -------------- + --------------------------------------*/ + // create login_method table + await queryRunner.query(` + create table "login_method" ( + "id" uuid default uuid_generate_v4() primary key, + "organizationId" uuid null, + "name" varchar(100) not null, + "config" text not null, + "status" varchar(20) default '${LoginMethodStatus.ENABLE}' not null, + "createdDate" timestamp default now() not null, + "updatedDate" timestamp default now() not null, + "createdBy" uuid null, + "updatedBy" uuid null, + constraint "fk_organizationId" foreign key ("organizationId") references "organization" ("id"), + constraint "fk_createdBy" foreign key ("createdBy") references "user" ("id"), + constraint "fk_updatedBy" foreign key ("updatedBy") references "user" ("id") + ); + `) + + /*------------------------------------- + --------------- role ------------------ + --------------------------------------*/ + // rename roles table to temp_role + await queryRunner.query(`alter table "roles" rename to "temp_role";`) + + // create organization_login_method table + await queryRunner.query(` + create table "role" ( + "id" uuid default uuid_generate_v4() primary key, + "organizationId" uuid null, + "name" varchar(100) not null, + "description" text null, + "permissions" text not null, + "createdDate" timestamp default now() not null, + "updatedDate" timestamp default now() not null, + "createdBy" uuid null, + "updatedBy" uuid null, + constraint "fk_organizationId" foreign key ("organizationId") references "organization" ("id"), + 
constraint "fk_createdBy" foreign key ("createdBy") references "user" ("id"), + constraint "fk_updatedBy" foreign key ("updatedBy") references "user" ("id") + ); + `) + + /*------------------------------------- + ---------- organization_user ---------- + --------------------------------------*/ + // create organization_user table + await queryRunner.query(` + create table "organization_user" ( + "organizationId" uuid not null, + "userId" uuid not null, + "roleId" uuid not null, + "status" varchar(20) default '${OrganizationUserStatus.ACTIVE}' not null, + "createdDate" timestamp default now() not null, + "updatedDate" timestamp default now() not null, + "createdBy" uuid not null, + "updatedBy" uuid not null, + constraint "pk_organization_user" primary key ("organizationId", "userId"), + constraint "fk_organizationId" foreign key ("organizationId") references "organization" ("id"), + constraint "fk_userId" foreign key ("userId") references "user" ("id"), + constraint "fk_roleId" foreign key ("roleId") references "role" ("id"), + constraint "fk_createdBy" foreign key ("createdBy") references "user" ("id"), + constraint "fk_updatedBy" foreign key ("updatedBy") references "user" ("id") + ); + `) + + /*------------------------------------- + ------------- workspace --------------- + --------------------------------------*/ + // modify workspace table + await queryRunner.query(` + alter table "workspace" + drop constraint "fk_workspace_organizationId", + alter column "organizationId" set not null, + alter column "name" type varchar(100), + alter column "description" type text, + add column "createdBy" uuid null, + add column "updatedBy" uuid null, + add constraint "fk_createdBy" foreign key ("createdBy") references "user" ("id"), + add constraint "fk_updatedBy" foreign key ("updatedBy") references "user" ("id"); + `) + + // remove first if needed will be add back, will cause insert to slow + await queryRunner.query(` + drop index "idx_workspace_organizationId"; + `) + + 
/*------------------------------------- + ----------- workspace_user ------------ + --------------------------------------*/ + // rename workspace_users table to temp_workspace_user + await queryRunner.query(`alter table "workspace_users" rename to "temp_workspace_user";`) + + // create workspace_user table + await queryRunner.query(` + create table "workspace_user" ( + "workspaceId" uuid not null, + "userId" uuid not null, + "roleId" uuid not null, + "status" varchar(20) default '${WorkspaceUserStatus.INVITED}' not null, + "lastLogin" timestamp null, + "createdDate" timestamp default now() not null, + "updatedDate" timestamp default now() not null, + "createdBy" uuid not null, + "updatedBy" uuid not null, + constraint "pk_workspace_user" primary key ("workspaceId", "userId"), + constraint "fk_workspaceId" foreign key ("workspaceId") references "workspace" ("id"), + constraint "fk_userId" foreign key ("userId") references "user" ("id"), + constraint "fk_roleId" foreign key ("roleId") references "role" ("id"), + constraint "fk_createdBy" foreign key ("createdBy") references "user" ("id"), + constraint "fk_updatedBy" foreign key ("updatedBy") references "user" ("id") + ); + `) + } + + private async deleteWorkspaceWithoutUser(queryRunner: QueryRunner) { + const workspaceWithoutUser = await queryRunner.query(` + select w."id" as "id" from "workspace_user" as "wu" + right join "workspace" as "w" on "wu"."workspaceId" = "w"."id" + where "wu"."userId" is null; + `) + const workspaceIds = workspaceWithoutUser.map((workspace: { id: string }) => `'${workspace.id}'`).join(',') + + // Delete related records from other tables that reference the deleted workspaces + if (workspaceIds && workspaceIds.length > 0) { + await queryRunner.query(` + delete from "workspace_user" where "workspaceId" in (${workspaceIds}); + `) + await queryRunner.query(` + delete from "apikey" where "workspaceId" in (${workspaceIds}); + `) + await queryRunner.query(` + delete from "assistant" where 
"workspaceId" in (${workspaceIds}); + `) + const chatflows = await queryRunner.query(` + select id from "chat_flow" where "workspaceId" in (${workspaceIds}); + `) + const chatflowIds = chatflows.map((chatflow: { id: string }) => `'${chatflow.id}'`).join(',') + if (chatflowIds && chatflowIds.length > 0) { + await queryRunner.query(` + delete from "chat_flow" where "workspaceId" in (${workspaceIds}); + `) + await queryRunner.query(` + delete from "upsert_history" where "chatflowid" in (${chatflowIds}); + `) + await queryRunner.query(` + delete from "chat_message" where "chatflowid" in (${chatflowIds}); + `) + await queryRunner.query(` + delete from "chat_message_feedback" where "chatflowid" in (${chatflowIds}); + `) + } + await queryRunner.query(` + delete from "credential" where "workspaceId" in (${workspaceIds}); + `) + await queryRunner.query(` + delete from "custom_template" where "workspaceId" in (${workspaceIds}); + `) + const datasets = await queryRunner.query(` + select id from "dataset" where "workspaceId" in (${workspaceIds}); + `) + const datasetIds = datasets.map((dataset: { id: string }) => `'${dataset.id}'`).join(',') + if (datasetIds && datasetIds.length > 0) { + await queryRunner.query(` + delete from "dataset" where "workspaceId" in (${workspaceIds}); + `) + await queryRunner.query(` + delete from "dataset_row" where "datasetId" in (${datasetIds}); + `) + } + const documentStores = await queryRunner.query(` + select id from "document_store" where "workspaceId" in (${workspaceIds}); + `) + const documentStoreIds = documentStores.map((documentStore: { id: string }) => `'${documentStore.id}'`).join(',') + if (documentStoreIds && documentStoreIds.length > 0) { + await queryRunner.query(` + delete from "document_store" where "workspaceId" in (${workspaceIds}); + `) + await queryRunner.query(` + delete from "document_store_file_chunk" where "storeId" in (${documentStoreIds}); + `) + } + const evaluations = await queryRunner.query(` + select id from 
"evaluation" where "workspaceId" in (${workspaceIds}); + `) + const evaluationIds = evaluations.map((evaluation: { id: string }) => `'${evaluation.id}'`).join(',') + if (evaluationIds && evaluationIds.length > 0) { + await queryRunner.query(` + delete from "evaluation" where "workspaceId" in (${workspaceIds}); + `) + await queryRunner.query(` + delete from "evaluation_run" where "evaluationId" in (${evaluationIds}); + `) + } + await queryRunner.query(` + delete from "evaluator" where "workspaceId" in (${workspaceIds}); + `) + await queryRunner.query(` + delete from "tool" where "workspaceId" in (${workspaceIds}); + `) + await queryRunner.query(` + delete from "variable" where "workspaceId" in (${workspaceIds}); + `) + await queryRunner.query(` + delete from "workspace_shared" where "workspaceId" in (${workspaceIds}); + `) + await queryRunner.query(` + delete from "workspace" where "id" in (${workspaceIds}); + `) + } + } + + private async populateTable(queryRunner: QueryRunner): Promise { + // insert generalRole + const generalRole = [ + { + name: 'owner', + description: 'Has full control over the organization.', + permissions: '["organization","workspace"]' + }, + { + name: 'member', + description: 'Has limited control over the organization.', + permissions: '[]' + }, + { + name: 'personal workspace', + description: 'Has full control over the personal workspace', + permissions: + '[ "chatflows:view", "chatflows:create", "chatflows:update", "chatflows:duplicate", "chatflows:delete", "chatflows:export", "chatflows:import", "chatflows:config", "chatflows:domains", "agentflows:view", "agentflows:create", "agentflows:update", "agentflows:duplicate", "agentflows:delete", "agentflows:export", "agentflows:import", "agentflows:config", "agentflows:domains", "tools:view", "tools:create", "tools:update", "tools:delete", "tools:export", "assistants:view", "assistants:create", "assistants:update", "assistants:delete", "credentials:view", "credentials:create", 
"credentials:update", "credentials:delete", "credentials:share", "variables:view", "variables:create", "variables:update", "variables:delete", "apikeys:view", "apikeys:create", "apikeys:update", "apikeys:delete", "apikeys:import", "documentStores:view", "documentStores:create", "documentStores:update", "documentStores:delete", "documentStores:add-loader", "documentStores:delete-loader", "documentStores:preview-process", "documentStores:upsert-config", "datasets:view", "datasets:create", "datasets:update", "datasets:delete", "evaluators:view", "evaluators:create", "evaluators:update", "evaluators:delete", "evaluations:view", "evaluations:create", "evaluations:update", "evaluations:delete", "evaluations:run", "templates:marketplace", "templates:custom", "templates:custom-delete", "templates:toolexport", "templates:flowexport", "templates:custom-share", "workspace:export", "workspace:import", "executions:view", "executions:delete" ]' + } + ] + for (let role of generalRole) { + await queryRunner.query(` + insert into "role"("name", "description", "permissions") + values('${role.name}', '${role.description}', '${role.permissions}'); + `) + } + + const users = await queryRunner.query('select * from "temp_user";') + const noExistingData = users.length > 0 === false + if (noExistingData) return + + const organizations = await queryRunner.query('select * from "temp_organization";') + const organizationId = organizations[0].id + const adminUserId = organizations[0].adminUserId + const ssoConfig = organizations[0].sso_config ? 
JSON.parse(await decrypt(organizations[0].sso_config)).providers : [] + + /*------------------------------------- + --------------- user ----------------- + --------------------------------------*/ + // insert user with temp_user data + await queryRunner.query(` + insert into "user" ("id", "name", "email", "credential", "tempToken", "tokenExpiry", "status", "createdBy", "updatedBy") + select tu."id", coalesce(tu."name", tu."email"), tu."email", tu."credential", tu."tempToken", tu."tokenExpiry", tu."status", + '${adminUserId}', '${adminUserId}' + from "temp_user" as "tu"; + `) + + /*------------------------------------- + ----------- organization -------------- + --------------------------------------*/ + // insert organization with temp_organization data + await queryRunner.query(` + insert into "organization" ("id", "name", "createdBy", "updatedBy") + select "id", "name", "adminUserId"::uuid, "adminUserId"::uuid from "temp_organization"; + `) + + /*------------------------------------- + ----------- login method -------------- + --------------------------------------*/ + // insert login_method with temp_organization data + for (let config of ssoConfig) { + const newConfigFormat = { + domain: config.domain === '' || config.domain === undefined ? undefined : config.domain, + tenantID: config.tenantID === '' || config.tenantID === undefined ? undefined : config.tenantID, + clientID: config.clientID === '' || config.clientID === undefined ? undefined : config.clientID, + clientSecret: config.clientSecret === '' || config.clientSecret === undefined ? undefined : config.clientSecret + } + const status = config.configEnabled === true ? 
LoginMethodStatus.ENABLE : LoginMethodStatus.DISABLE + + const allUndefined = Object.values(newConfigFormat).every((value) => value === undefined) + if (allUndefined && status === LoginMethodStatus.DISABLE) continue + const encryptData = await encrypt(JSON.stringify(newConfigFormat)) + + await queryRunner.query(` + insert into "login_method" ("organizationId", "name", "config", "status", "createdBy", "updatedBy") + values('${organizationId}','${config.providerName}','${encryptData}','${status}','${adminUserId}','${adminUserId}'); + `) + } + + /*------------------------------------- + --------------- role ------------------ + --------------------------------------*/ + // insert workspace role into role + const workspaceRole = await queryRunner.query(`select "id", "name", "description", "permissions" from "temp_role";`) + for (let role of workspaceRole) { + role.permissions = JSON.stringify(role.permissions.split(',').filter((permission: string) => permission.trim() !== '')) + const haveDescriptionQuery = `insert into "role" ("id", "organizationId", "name", "description", "permissions", "createdBy", "updatedBy") + values('${role.id}','${organizationId}','${role.name}','${role.description}','${role.permissions}','${adminUserId}','${adminUserId}');` + const noHaveDescriptionQuery = `insert into "role" ("id", "organizationId", "name", "permissions", "createdBy", "updatedBy") + values('${role.id}','${organizationId}','${role.name}','${role.permissions}','${adminUserId}','${adminUserId}');` + const insertRoleQuery = role.description ? haveDescriptionQuery : noHaveDescriptionQuery + await queryRunner.query(insertRoleQuery) + } + + /*------------------------------------- + ---------- organization_user ---------- + --------------------------------------*/ + const roles = await queryRunner.query('select * from "role";') + // insert organization_user with user, role and temp_organization data + for (let user of users) { + const roleId = + user.id === adminUserId + ? 
roles.find((role: any) => role.name === GeneralRole.OWNER).id + : roles.find((role: any) => role.name === GeneralRole.MEMBER).id + await queryRunner.query(` + insert into "organization_user" ("organizationId", "userId", "roleId", "status", "createdBy", "updatedBy") + values ('${organizationId}','${user.id}','${roleId}','${user.status}','${adminUserId}','${adminUserId}'); + `) + } + + /*------------------------------------- + ------------- workspace --------------- + --------------------------------------*/ + const workspaces = await queryRunner.query('select * from "workspace";') + for (let workspace of workspaces) { + await queryRunner.query( + `update "workspace" set "createdBy" = '${adminUserId}', "updatedBy" = '${adminUserId}' where "id" = '${workspace.id}';` + ) + } + + /*------------------------------------- + ----------- workspace_user ------------ + --------------------------------------*/ + const workspaceUsers = await queryRunner.query('select * from "temp_workspace_user";') + for (let workspaceUser of workspaceUsers) { + switch (workspaceUser.role) { + case 'org_admin': + workspaceUser.role = roles.find((role: any) => role.name === GeneralRole.OWNER).id + break + case 'pw': + workspaceUser.role = roles.find((role: any) => role.name === GeneralRole.PERSONAL_WORKSPACE).id + break + default: + workspaceUser.role = roles.find((role: any) => role.name === workspaceUser.role).id + break + } + const user = users.find((user: any) => user.id === workspaceUser.userId) + const workspace = workspaces.find((workspace: any) => workspace.id === workspaceUser.workspaceId) + if (workspaceUser.workspaceId === user.activeWorkspaceId && user.lastLogin) { + const lastLogin = new Date(user.lastLogin).toISOString() + await queryRunner.query(` + insert into "workspace_user" ("workspaceId", "userId", "roleId", "status", "lastLogin","createdBy", "updatedBy") + values 
('${workspaceUser.workspaceId}','${workspaceUser.userId}','${workspaceUser.role}','${WorkspaceUserStatus.ACTIVE}','${lastLogin}','${adminUserId}','${adminUserId}'); + `) + } else if (workspace.name === WorkspaceName.DEFAULT_PERSONAL_WORKSPACE && !user.lastLogin) { + // Skip personal workspaces for users who haven't signed up yet to avoid duplicates when they sign up. + // account.service.ts creates personal workspace during sign-up. + await queryRunner.query(` + delete from "temp_workspace_user" where "workspaceId" = '${workspaceUser.workspaceId}' and "userId" = '${workspaceUser.userId}'; + `) + await queryRunner.query(` + delete from "workspace" where "id" = '${workspaceUser.workspaceId}'; + `) + } else { + await queryRunner.query(` + insert into "workspace_user" ("workspaceId", "userId", "roleId", "status","createdBy", "updatedBy") + values ('${workspaceUser.workspaceId}','${workspaceUser.userId}','${workspaceUser.role}','${WorkspaceUserStatus.INVITED}','${adminUserId}','${adminUserId}'); + `) + } + } + + await this.deleteWorkspaceWithoutUser(queryRunner) + } + + private async deleteTempTable(queryRunner: QueryRunner): Promise { + await queryRunner.query(` + drop table "temp_workspace_user"; + `) + await queryRunner.query(` + drop table "temp_role"; + `) + await queryRunner.query(` + drop table "temp_organization"; + `) + await queryRunner.query(` + drop table "temp_user"; + `) + } + + public async up(queryRunner: QueryRunner): Promise { + await this.modifyTable(queryRunner) + await this.populateTable(queryRunner) + await this.deleteTempTable(queryRunner) + + // This query cannot be part of the modifyTable function because: + // 1. The "organizationId" in the "workspace" table might be referencing data in the "temp_organization" table, so it must be altered last. + // 2. 
Setting "createdBy" and "updatedBy" to NOT NULL needs to happen after ensuring there's no existing data that would violate the constraint, + // because altering these columns while there is data could prevent new records from being inserted into the "workspace" table. + await queryRunner.query(` + alter table "workspace" + alter column "createdBy" set not null, + alter column "updatedBy" set not null, + add constraint "fk_organizationId" foreign key ("organizationId") references "organization" ("id"); + `) + } + + public async down(): Promise<void> {} +} diff --git a/packages/server/src/enterprise/database/migrations/postgres/1746862866554-ExecutionLinkWorkspaceId.ts b/packages/server/src/enterprise/database/migrations/postgres/1746862866554-ExecutionLinkWorkspaceId.ts new file mode 100644 index 000000000..2c8798f63 --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/postgres/1746862866554-ExecutionLinkWorkspaceId.ts @@ -0,0 +1,43 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class ExecutionLinkWorkspaceId1746862866554 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise<void> { + // step 1 - add workspaceId column + await queryRunner.query(`ALTER TABLE "execution" ADD COLUMN IF NOT EXISTS "workspaceId" varchar;`) + + // step 2 - convert from varchar to UUID type + await queryRunner.query(` + ALTER TABLE "execution" ALTER COLUMN "workspaceId" SET DATA TYPE UUID USING "workspaceId"::UUID; + `) + + // step 3 - add foreign key constraint + await queryRunner.query(` + ALTER TABLE "execution" ADD CONSTRAINT "fk_execution_workspaceId" FOREIGN KEY ("workspaceId") REFERENCES "workspace"("id"); + `) + + // step 4 - create index for workspaceId + await queryRunner.query(` + CREATE INDEX "idx_execution_workspaceId" ON "execution"("workspaceId"); + `) + } + + public async down(queryRunner: QueryRunner): Promise<void> { + // step 1 - drop index + await queryRunner.query(` + DROP INDEX "idx_execution_workspaceId"; + `)
+ + // step 2 - drop foreign key constraint + await queryRunner.query(` + ALTER TABLE "execution" DROP CONSTRAINT "fk_execution_workspaceId"; + `) + + // step 3 - convert from UUID to varchar type + await queryRunner.query(` + ALTER TABLE "execution" ALTER COLUMN "workspaceId" SET DATA TYPE varchar USING "workspaceId"::varchar; + `) + + // step 4 - drop workspaceId column + await queryRunner.query(`ALTER TABLE "execution" DROP COLUMN "workspaceId";`) + } +} diff --git a/packages/server/src/enterprise/database/migrations/sqlite/1720230151482-AddAuthTables.ts b/packages/server/src/enterprise/database/migrations/sqlite/1720230151482-AddAuthTables.ts new file mode 100644 index 000000000..3370af6e7 --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/sqlite/1720230151482-AddAuthTables.ts @@ -0,0 +1,40 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddAuthTables1720230151482 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS "user" ( + "id" varchar PRIMARY KEY NOT NULL, + "role" varchar NOT NULL, + "name" varchar, + "credential" text, + "tempToken" text, + "tokenExpiry" datetime, + "email" varchar NOT NULL, + "status" varchar NOT NULL, + "activeWorkspaceId" varchar NOT NULL, + "lastLogin" datetime);` + ) + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS "roles" ( + "id" varchar PRIMARY KEY NOT NULL, + "name" varchar, + "description" varchar, + "permissions" text);` + ) + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS "login_activity" ( + "id" varchar PRIMARY KEY NOT NULL, + "username" varchar NOT NULL, + "activity_code" integer NOT NULL, + "message" varchar NOT NULL, + "attemptedDateTime" datetime NOT NULL DEFAULT (datetime('now')));` + ) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`DROP TABLE user`) + await queryRunner.query(`DROP TABLE roles`) + await 
queryRunner.query(`DROP TABLE login_activity`) + } +} diff --git a/packages/server/src/enterprise/database/migrations/sqlite/1720230151484-AddWorkspace.ts b/packages/server/src/enterprise/database/migrations/sqlite/1720230151484-AddWorkspace.ts new file mode 100644 index 000000000..5718b6a53 --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/sqlite/1720230151484-AddWorkspace.ts @@ -0,0 +1,47 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' +import { ensureColumnExists } from './sqlliteCustomFunctions' + +export class AddWorkspace1720230151484 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS "workspace" ("id" varchar PRIMARY KEY NOT NULL, +"name" text NOT NULL, +"description" varchar, +"createdDate" datetime NOT NULL DEFAULT (datetime('now')), +"updatedDate" datetime NOT NULL DEFAULT (datetime('now')));` + ) + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS "workspace_users" ("id" varchar PRIMARY KEY NOT NULL, +"workspaceId" varchar NOT NULL, +"userId" varchar NOT NULL, +"role" varchar NOT NULL);` + ) + + await ensureColumnExists(queryRunner, 'chat_flow', 'workspaceId', 'TEXT') + await ensureColumnExists(queryRunner, 'tool', 'workspaceId', 'TEXT') + await ensureColumnExists(queryRunner, 'assistant', 'workspaceId', 'TEXT') + await ensureColumnExists(queryRunner, 'credential', 'workspaceId', 'TEXT') + await ensureColumnExists(queryRunner, 'document_store', 'workspaceId', 'TEXT') + await ensureColumnExists(queryRunner, 'evaluation', 'workspaceId', 'TEXT') + await ensureColumnExists(queryRunner, 'evaluator', 'workspaceId', 'TEXT') + await ensureColumnExists(queryRunner, 'dataset', 'workspaceId', 'TEXT') + await ensureColumnExists(queryRunner, 'apikey', 'workspaceId', 'TEXT') + await ensureColumnExists(queryRunner, 'variable', 'workspaceId', 'TEXT') + } + + public async down(queryRunner: QueryRunner): Promise { + await 
queryRunner.query(`DROP TABLE workspace`) + await queryRunner.query(`DROP TABLE workspace_users`) + + await queryRunner.query(`ALTER TABLE "chat_flow" DROP COLUMN "workspaceId";`) + await queryRunner.query(`ALTER TABLE "tool" DROP COLUMN "workspaceId";`) + await queryRunner.query(`ALTER TABLE "assistant" DROP COLUMN "workspaceId";`) + await queryRunner.query(`ALTER TABLE "credential" DROP COLUMN "workspaceId";`) + await queryRunner.query(`ALTER TABLE "document_store" DROP COLUMN "workspaceId";`) + await queryRunner.query(`ALTER TABLE "evaluation" DROP COLUMN "workspaceId";`) + await queryRunner.query(`ALTER TABLE "evaluator" DROP COLUMN "workspaceId";`) + await queryRunner.query(`ALTER TABLE "dataset" DROP COLUMN "workspaceId";`) + await queryRunner.query(`ALTER TABLE "apikey" DROP COLUMN "workspaceId";`) + await queryRunner.query(`ALTER TABLE "variable" DROP COLUMN "workspaceId";`) + } +} diff --git a/packages/server/src/enterprise/database/migrations/sqlite/1726654922034-AddWorkspaceShared.ts b/packages/server/src/enterprise/database/migrations/sqlite/1726654922034-AddWorkspaceShared.ts new file mode 100644 index 000000000..301077981 --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/sqlite/1726654922034-AddWorkspaceShared.ts @@ -0,0 +1,19 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddWorkspaceShared1726654922034 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS "workspace_shared" ( + "id" varchar PRIMARY KEY NOT NULL, + "workspaceId" varchar NOT NULL, + "sharedItemId" varchar NOT NULL, + "itemType" varchar NOT NULL, + "createdDate" datetime NOT NULL DEFAULT (datetime('now')), + "updatedDate" datetime NOT NULL DEFAULT (datetime('now')));` + ) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`DROP TABLE workspace_shared`) + } +} diff --git 
a/packages/server/src/enterprise/database/migrations/sqlite/1726655750383-AddWorkspaceIdToCustomTemplate.ts b/packages/server/src/enterprise/database/migrations/sqlite/1726655750383-AddWorkspaceIdToCustomTemplate.ts new file mode 100644 index 000000000..ab6efb66e --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/sqlite/1726655750383-AddWorkspaceIdToCustomTemplate.ts @@ -0,0 +1,11 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddWorkspaceIdToCustomTemplate1726655750383 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE "custom_template" ADD COLUMN "workspaceId" TEXT;`) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE "custom_template" DROP COLUMN "workspaceId";`) + } +} diff --git a/packages/server/src/enterprise/database/migrations/sqlite/1727798417345-AddOrganization.ts b/packages/server/src/enterprise/database/migrations/sqlite/1727798417345-AddOrganization.ts new file mode 100644 index 000000000..79c08aa6c --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/sqlite/1727798417345-AddOrganization.ts @@ -0,0 +1,24 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' +import { ensureColumnExists } from './sqlliteCustomFunctions' + +export class AddOrganization1727798417345 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS "organization" ("id" varchar PRIMARY KEY NOT NULL, +"name" text NOT NULL, +"adminUserId" text, +"defaultWsId" text, +"organization_type" text, +"createdDate" datetime NOT NULL DEFAULT (datetime('now')), +"updatedDate" datetime NOT NULL DEFAULT (datetime('now')));` + ) + + await ensureColumnExists(queryRunner, 'workspace', 'organizationId', 'varchar') + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`DROP TABLE 
organization`) + + await queryRunner.query(`ALTER TABLE "workspace" DROP COLUMN "organizationId";`) + } +} diff --git a/packages/server/src/enterprise/database/migrations/sqlite/1729130948686-LinkWorkspaceId.ts b/packages/server/src/enterprise/database/migrations/sqlite/1729130948686-LinkWorkspaceId.ts new file mode 100644 index 000000000..3204e8213 --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/sqlite/1729130948686-LinkWorkspaceId.ts @@ -0,0 +1,874 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export async function linkWorkspaceId(queryRunner: QueryRunner, include = true) { + /*------------------------------------- + ---------------- ApiKey --------------- + --------------------------------------*/ + // step 1 - create temp table with workspaceId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_apikey" ( + "id" varchar PRIMARY KEY NOT NULL, + "apiKey" varchar NOT NULL, + "apiSecret" varchar NOT NULL, + "keyName" varchar NOT NULL, + "createdDate" datetime NOT NULL DEFAULT (datetime('now')), + "updatedDate" datetime NOT NULL DEFAULT (datetime('now')), + "workspaceId" varchar, + FOREIGN KEY ("workspaceId") REFERENCES "workspace"("id") + ); + `) + + // step 2 - create index for workspaceId in temp_apikey table + await queryRunner.query(`CREATE INDEX IF NOT EXISTS "idx_apikey_workspaceId" ON "temp_apikey"("workspaceId");`) + + // step 3 - migrate data + await queryRunner.query(` + INSERT INTO "temp_apikey" ("id", "apiKey", "apiSecret", "keyName", "createdDate", "updatedDate", "workspaceId") + SELECT "id", "apiKey", "apiSecret", "keyName", "updatedDate", "updatedDate", "workspaceId" FROM "apikey"; + `) + + // step 4 - drop apikey table + await queryRunner.query(`DROP TABLE "apikey";`) + + // step 5 - alter temp_apikey to apikey table + await queryRunner.query(`ALTER TABLE "temp_apikey" RENAME TO "apikey";`) + + /*------------------------------------- + ---------------- User --------------- + 
--------------------------------------*/ + if (include) { + // step 1 - create temp table with activeWorkspaceId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_user" ( + "id" varchar PRIMARY KEY NOT NULL, + "role" varchar NOT NULL, + "name" varchar, + "credential" text, + "tempToken" text, + "tokenExpiry" datetime, + "email" varchar NOT NULL, + "status" varchar NOT NULL, + "lastLogin" datetime, + "activeWorkspaceId" varchar NOT NULL, + FOREIGN KEY ("activeWorkspaceId") REFERENCES "workspace"("id") + ); + `) + + // step 2 - create index for activeWorkspaceId in temp_user table + await queryRunner.query(`CREATE INDEX IF NOT EXISTS "idx_user_activeWorkspaceId" ON "temp_user"("activeWorkspaceId");`) + + // step 3 - migrate data + await queryRunner.query(` + INSERT INTO "temp_user" ("id", "role", "name", "credential", "tempToken", "tokenExpiry", "email", "status", "lastLogin", "activeWorkspaceId") + SELECT "id", "role", "name", "credential", "tempToken", "tokenExpiry", "email", "status", "lastLogin", "activeWorkspaceId" FROM "user"; + `) + + // step 4 - drop user table + await queryRunner.query(`DROP TABLE "user";`) + + // step 5 - alter temp_user to user table + await queryRunner.query(`ALTER TABLE "temp_user" RENAME TO "user";`) + } + + /*---------------------------------------------- + ---------------- Workspace Users --------------- + ------------------------------------------------*/ + + if (include) { + // step 1 - create temp table with workspaceId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_workspace_users" ( + "id" varchar PRIMARY KEY NOT NULL, + "workspaceId" varchar NOT NULL, + "userId" varchar NOT NULL, + "role" varchar NOT NULL, + FOREIGN KEY ("workspaceId") REFERENCES "workspace"("id") + ); + `) + + // step 2 - create index for workspaceId in temp_workspace_users table + await queryRunner.query(`CREATE INDEX IF NOT EXISTS "idx_workspace_users_workspaceId" ON "temp_workspace_users"("workspaceId");`) + + // step 3 - 
migrate data + await queryRunner.query(` + INSERT INTO "temp_workspace_users" ("id", "workspaceId", "userId", "role") + SELECT "id", "workspaceId", "userId", "role" FROM "workspace_users"; + `) + + // step 4 - drop workspace_users table + await queryRunner.query(`DROP TABLE "workspace_users";`) + + // step 5 - alter temp_workspace_users to workspace_users table + await queryRunner.query(`ALTER TABLE "temp_workspace_users" RENAME TO "workspace_users";`) + } + + /*---------------------------------------------- + ---------------- Chatflow ---------------------- + ------------------------------------------------*/ + + // step 1 - create temp table with workspaceId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_chat_flow" ( + "id" varchar PRIMARY KEY NOT NULL, + "name" varchar NOT NULL, + "flowData" text NOT NULL, + "deployed" boolean, + "isPublic" boolean, + "apikeyid" varchar, + "chatbotConfig" text, + "createdDate" datetime NOT NULL DEFAULT (datetime('now')), + "updatedDate" datetime NOT NULL DEFAULT (datetime('now')), + "apiConfig" TEXT, + "analytic" TEXT, + "category" TEXT, + "speechToText" TEXT, + "type" TEXT, + "workspaceId" TEXT, + "followUpPrompts" TEXT, + FOREIGN KEY ("workspaceId") REFERENCES "workspace"("id") + ); + `) + + // step 2 - create index for workspaceId in temp_chat_flow table + await queryRunner.query(`CREATE INDEX IF NOT EXISTS "idx_chat_flow_workspaceId" ON "temp_chat_flow"("workspaceId");`) + + // step 3 - migrate data + await queryRunner.query(` + INSERT INTO "temp_chat_flow" ("id", "name", "flowData", "deployed", "isPublic", "apikeyid", "chatbotConfig", "createdDate", "updatedDate", "apiConfig", "analytic", "category", "speechToText", "type", "workspaceId", "followUpPrompts") + SELECT "id", "name", "flowData", "deployed", "isPublic", "apikeyid", "chatbotConfig", "createdDate", "updatedDate", "apiConfig", "analytic", "category", "speechToText", "type", "workspaceId", "followUpPrompts" FROM "chat_flow"; + `) + + // step 4 - 
drop chat_flow table + await queryRunner.query(`DROP TABLE "chat_flow";`) + + // step 5 - alter temp_chat_flow to chat_flow table + await queryRunner.query(`ALTER TABLE "temp_chat_flow" RENAME TO "chat_flow";`) + + /*---------------------------------------------- + ---------------- Tool -------------------------- + ------------------------------------------------*/ + + // step 1 - create temp table with workspaceId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_tool" ( + "id" varchar PRIMARY KEY NOT NULL, + "name" varchar NOT NULL, + "description" text NOT NULL, + "color" varchar NOT NULL, + "iconSrc" varchar, + "schema" text, + "func" text, + "createdDate" datetime NOT NULL DEFAULT (datetime('now')), + "updatedDate" datetime NOT NULL DEFAULT (datetime('now')), + "workspaceId" TEXT, + FOREIGN KEY ("workspaceId") REFERENCES "workspace"("id") + ); + `) + + // step 2 - create index for workspaceId in temp_tool table + await queryRunner.query(`CREATE INDEX IF NOT EXISTS "idx_tool_workspaceId" ON "temp_tool"("workspaceId");`) + + // step 3 - migrate data + await queryRunner.query(` + INSERT INTO "temp_tool" ("id", "name", "description", "color", "iconSrc", "schema", "func", "createdDate", "updatedDate", "workspaceId") + SELECT "id", "name", "description", "color", "iconSrc", "schema", "func", "createdDate", "updatedDate", "workspaceId" FROM "tool"; + `) + + // step 4 - drop tool table + await queryRunner.query(`DROP TABLE "tool";`) + + // step 5 - alter temp_tool to tool table + await queryRunner.query(`ALTER TABLE "temp_tool" RENAME TO "tool";`) + + /*---------------------------------------------- + ---------------- Assistant ---------------------- + ------------------------------------------------*/ + + // step 1 - create temp table with workspaceId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_assistant" ( + "id" varchar PRIMARY KEY NOT NULL, + "details" text NOT NULL, + "credential" varchar NOT NULL, + "iconSrc" varchar, + 
"createdDate" datetime NOT NULL DEFAULT (datetime('now')), + "updatedDate" datetime NOT NULL DEFAULT (datetime('now')), + "workspaceId" TEXT, + FOREIGN KEY ("workspaceId") REFERENCES "workspace"("id") + ); + `) + + // step 2 - create index for workspaceId in temp_assistant table + await queryRunner.query(`CREATE INDEX IF NOT EXISTS "idx_assistant_workspaceId" ON "temp_assistant"("workspaceId");`) + + // step 3 - migrate data + await queryRunner.query(` + INSERT INTO "temp_assistant" ("id", "details", "credential", "iconSrc", "createdDate", "updatedDate", "workspaceId") + SELECT "id", "details", "credential", "iconSrc", "createdDate", "updatedDate", "workspaceId" FROM "assistant"; + `) + + // step 4 - drop assistant table + await queryRunner.query(`DROP TABLE "assistant";`) + + // step 5 - alter temp_assistant to assistant table + await queryRunner.query(`ALTER TABLE "temp_assistant" RENAME TO "assistant";`) + + /*---------------------------------------------- + ---------------- Credential ---------------------- + ------------------------------------------------*/ + + // step 1 - create temp table with workspaceId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_credential" ( + "id" varchar PRIMARY KEY NOT NULL, + "name" varchar NOT NULL, + "credentialName" varchar NOT NULL, + "encryptedData" text NOT NULL, + "createdDate" datetime NOT NULL DEFAULT (datetime('now')), + "updatedDate" datetime NOT NULL DEFAULT (datetime('now')), + "workspaceId" TEXT, + FOREIGN KEY ("workspaceId") REFERENCES "workspace"("id") + ); + `) + + // step 2 - create index for workspaceId in temp_credential table + await queryRunner.query(`CREATE INDEX IF NOT EXISTS "idx_credential_workspaceId" ON "temp_credential"("workspaceId");`) + + // step 3 - migrate data + await queryRunner.query(` + INSERT INTO "temp_credential" ("id", "name", "credentialName", "encryptedData", "createdDate", "updatedDate", "workspaceId") + SELECT "id", "name", "credentialName", "encryptedData", 
"createdDate", "updatedDate", "workspaceId" FROM "credential"; + `) + + // step 4 - drop credential table + await queryRunner.query(`DROP TABLE "credential";`) + + // step 5 - alter temp_credential to credential table + await queryRunner.query(`ALTER TABLE "temp_credential" RENAME TO "credential";`) + + /*--------------------------------------------------- + ---------------- Document Store ---------------------- + -----------------------------------------------------*/ + + // step 1 - create temp table with workspaceId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_document_store" ( + "id" varchar PRIMARY KEY NOT NULL, + "name" varchar NOT NULL, + "description" varchar, + "status" varchar NOT NULL, + "loaders" text, + "whereUsed" text, + "updatedDate" datetime NOT NULL DEFAULT (datetime('now')), + "createdDate" datetime NOT NULL DEFAULT (datetime('now')), + "vectorStoreConfig" TEXT, + "embeddingConfig" TEXT, + "recordManagerConfig" TEXT, + "workspaceId" TEXT, + FOREIGN KEY ("workspaceId") REFERENCES "workspace"("id") + ); + `) + + // step 2 - create index for workspaceId in temp_document_store table + await queryRunner.query(`CREATE INDEX IF NOT EXISTS "idx_document_store_workspaceId" ON "temp_document_store"("workspaceId");`) + + // step 3 - migrate data + await queryRunner.query(` + INSERT INTO "temp_document_store" ("id", "name", "description", "status", "loaders", "whereUsed", "updatedDate", "createdDate", "vectorStoreConfig", "embeddingConfig", "recordManagerConfig", "workspaceId") + SELECT "id", "name", "description", "status", "loaders", "whereUsed", "updatedDate", "createdDate", "vectorStoreConfig", "embeddingConfig", "recordManagerConfig", "workspaceId" FROM "document_store"; + `) + + // step 4 - drop document_store table + await queryRunner.query(`DROP TABLE "document_store";`) + + // step 5 - alter temp_document_store to document_store table + await queryRunner.query(`ALTER TABLE "temp_document_store" RENAME TO "document_store";`) + + 
/*--------------------------------------------------- + ---------------- Evaluation ------------------------- + -----------------------------------------------------*/ + + // step 1 - create temp table with workspaceId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_evaluation" ( + "id" varchar PRIMARY KEY NOT NULL, + "name" varchar NOT NULL, + "chatflowId" text NOT NULL, + "chatflowName" text NOT NULL, + "datasetId" varchar NOT NULL, + "datasetName" varchar NOT NULL, + "additionalConfig" text, + "status" varchar NOT NULL, + "evaluationType" varchar, + "average_metrics" text, + "runDate" datetime NOT NULL DEFAULT (datetime('now')), + "workspaceId" TEXT, + FOREIGN KEY ("workspaceId") REFERENCES "workspace"("id") + ); + `) + + // step 2 - create index for workspaceId in temp_evaluation table + await queryRunner.query(`CREATE INDEX IF NOT EXISTS "idx_evaluation_workspaceId" ON "temp_evaluation"("workspaceId");`) + + // step 3 - migrate data + await queryRunner.query(` + INSERT INTO "temp_evaluation" ("id", "name", "chatflowId", "chatflowName", "datasetId", "datasetName", "additionalConfig", "status", "evaluationType", "average_metrics", "runDate", "workspaceId") + SELECT "id", "name", "chatflowId", "chatflowName", "datasetId", "datasetName", "additionalConfig", "status", "evaluationType", "average_metrics", "runDate", "workspaceId" FROM "evaluation"; + `) + + // step 4 - drop evaluation table + await queryRunner.query(`DROP TABLE "evaluation";`) + + // step 5 - alter temp_evaluation to evaluation table + await queryRunner.query(`ALTER TABLE "temp_evaluation" RENAME TO "evaluation";`) + + /*--------------------------------------------------- + ---------------- Evaluator ------------------------- + -----------------------------------------------------*/ + + // step 1 - create temp table with workspaceId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_evaluator" ( + "id" varchar PRIMARY KEY NOT NULL, + "name" text NOT NULL, + "type" 
varchar, + "config" text, + "createdDate" datetime NOT NULL DEFAULT (datetime('now')), + "updatedDate" datetime NOT NULL DEFAULT (datetime('now')), + "workspaceId" TEXT, + FOREIGN KEY ("workspaceId") REFERENCES "workspace"("id") + ); + `) + + // step 2 - create index for workspaceId in temp_evaluator table + await queryRunner.query(`CREATE INDEX IF NOT EXISTS "idx_evaluator_workspaceId" ON "temp_evaluator"("workspaceId");`) + + // step 3 - migrate data + await queryRunner.query(` + INSERT INTO "temp_evaluator" ("id", "name", "type", "config", "createdDate", "updatedDate", "workspaceId") + SELECT "id", "name", "type", "config", "createdDate", "updatedDate", "workspaceId" FROM "evaluator"; + `) + + // step 4 - drop evaluator table + await queryRunner.query(`DROP TABLE "evaluator";`) + + // step 5 - alter temp_evaluator to evaluator table + await queryRunner.query(`ALTER TABLE "temp_evaluator" RENAME TO "evaluator";`) + + /*--------------------------------------------------- + ---------------- Dataset ------------------------- + -----------------------------------------------------*/ + + // step 1 - create temp table with workspaceId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_dataset" ( + "id" varchar PRIMARY KEY NOT NULL, + "name" text NOT NULL, + "description" varchar, + "createdDate" datetime NOT NULL DEFAULT (datetime('now')), + "updatedDate" datetime NOT NULL DEFAULT (datetime('now')), + "workspaceId" TEXT, + FOREIGN KEY ("workspaceId") REFERENCES "workspace"("id") + ); + `) + + // step 2 - create index for workspaceId in temp_dataset table + await queryRunner.query(`CREATE INDEX IF NOT EXISTS "idx_dataset_workspaceId" ON "temp_dataset"("workspaceId");`) + + // step 3 - migrate data + await queryRunner.query(` + INSERT INTO "temp_dataset" ("id", "name", "description", "createdDate", "updatedDate", "workspaceId") + SELECT "id", "name", "description", "createdDate", "updatedDate", "workspaceId" FROM "dataset"; + `) + + // step 4 - drop dataset 
table + await queryRunner.query(`DROP TABLE "dataset";`) + + // step 5 - alter temp_dataset to dataset table + await queryRunner.query(`ALTER TABLE "temp_dataset" RENAME TO "dataset";`) + + /*--------------------------------------------------- + ---------------- Variable --------------------------- + -----------------------------------------------------*/ + + // step 1 - create temp table with workspaceId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_variable" ( + "id" varchar PRIMARY KEY NOT NULL, + "name" text NOT NULL, + "value" text NOT NULL, + "type" varchar, + "createdDate" datetime NOT NULL DEFAULT (datetime('now')), + "updatedDate" datetime NOT NULL DEFAULT (datetime('now')), + "workspaceId" TEXT, + FOREIGN KEY ("workspaceId") REFERENCES "workspace"("id") + ); + `) + + // step 2 - create index for workspaceId in temp_variable table + await queryRunner.query(`CREATE INDEX IF NOT EXISTS "idx_variable_workspaceId" ON "temp_variable"("workspaceId");`) + + // step 3 - migrate data + await queryRunner.query(` + INSERT INTO "temp_variable" ("id", "name", "value", "type", "createdDate", "updatedDate", "workspaceId") + SELECT "id", "name", "value", "type", "createdDate", "updatedDate", "workspaceId" FROM "variable"; + `) + + // step 4 - drop variable table + await queryRunner.query(`DROP TABLE "variable";`) + + // step 5 - alter temp_variable to variable table + await queryRunner.query(`ALTER TABLE "temp_variable" RENAME TO "variable";`) + + /*--------------------------------------------------- + ---------------- Workspace Shared ------------------- + -----------------------------------------------------*/ + + // step 1 - create temp table with workspaceId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_workspace_shared" ( + "id" varchar PRIMARY KEY NOT NULL, + "workspaceId" varchar NOT NULL, + "sharedItemId" varchar NOT NULL, + "itemType" varchar NOT NULL, + "createdDate" datetime NOT NULL DEFAULT (datetime('now')), + "updatedDate" 
datetime NOT NULL DEFAULT (datetime('now')), + FOREIGN KEY ("workspaceId") REFERENCES "workspace"("id") + ); + `) + + // step 2 - create index for workspaceId in temp_workspace_shared table + await queryRunner.query(`CREATE INDEX IF NOT EXISTS "idx_workspace_shared_workspaceId" ON "temp_workspace_shared"("workspaceId");`) + + // step 3 - migrate data + await queryRunner.query(` + INSERT INTO "temp_workspace_shared" ("id", "workspaceId", "sharedItemId", "itemType", "createdDate", "updatedDate") + SELECT "id", "workspaceId", "sharedItemId", "itemType", "createdDate", "updatedDate" FROM "workspace_shared"; + `) + + // step 4 - drop workspace_shared table + await queryRunner.query(`DROP TABLE "workspace_shared";`) + + // step 5 - alter temp_workspace_shared to workspace_shared table + await queryRunner.query(`ALTER TABLE "temp_workspace_shared" RENAME TO "workspace_shared";`) + + /*--------------------------------------------------- + ---------------- Custom Template ------------------- + -----------------------------------------------------*/ + + // step 1 - create temp table with workspaceId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_custom_template" ( + "id" varchar PRIMARY KEY NOT NULL, + "name" varchar NOT NULL, + "flowData" text NOT NULL, + "description" varchar, + "badge" varchar, + "framework" varchar, + "usecases" varchar, + "type" varchar, + "updatedDate" datetime NOT NULL DEFAULT (datetime('now')), + "createdDate" datetime NOT NULL DEFAULT (datetime('now')), + "workspaceId" TEXT, + FOREIGN KEY ("workspaceId") REFERENCES "workspace"("id") + ); + `) + + // step 2 - create index for workspaceId in temp_custom_template table + await queryRunner.query(`CREATE INDEX IF NOT EXISTS "idx_custom_template_workspaceId" ON "temp_custom_template"("workspaceId");`) + + // step 3 - migrate data + await queryRunner.query(` + INSERT INTO "temp_custom_template" ("id", "name", "flowData", "description", "badge", "framework", "usecases", "type", 
"updatedDate", "createdDate", "workspaceId") + SELECT "id", "name", "flowData", "description", "badge", "framework", "usecases", "type", "updatedDate", "createdDate", "workspaceId" FROM "custom_template"; + `) + + // step 4 - drop custom_template table + await queryRunner.query(`DROP TABLE "custom_template";`) + + // step 5 - alter temp_custom_template to custom_template table + await queryRunner.query(`ALTER TABLE "temp_custom_template" RENAME TO "custom_template";`) +} + +export class LinkWorkspaceId1729130948686 implements MigrationInterface { + name = 'LinkWorkspaceId1729130948686' + + public async up(queryRunner: QueryRunner): Promise { + await linkWorkspaceId(queryRunner) + } + + public async down(queryRunner: QueryRunner): Promise { + // step 1 - create temp table without workspaceId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_apikey" ( + "id" varchar PRIMARY KEY NOT NULL, + "apiKey" varchar, + "apiSecret" varchar NOT NULL, + "keyName" varchar NOT NULL, + "updatedDate" datetime NOT NULL DEFAULT (datetime('now')), + "workspaceId" varchar + ); + `) + + // step 2 - migrate data + await queryRunner.query(` + INSERT INTO "temp_apikey" ("id", "apiKey", "apiSecret", "keyName", "updatedDate") + SELECT "id", "apiKey", "apiSecret", "keyName", "updatedDate" FROM "apikey"; + `) + + // step 3 - drop apikey table + await queryRunner.query(`DROP TABLE "apikey";`) + + // step 4 - alter temp_apikey to apiKey table + await queryRunner.query(`ALTER TABLE "temp_apikey" RENAME TO "apikey";`) + + // step 1 - create temp table without activeWorkspaceId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_user" ( + "id" varchar PRIMARY KEY NOT NULL, + "role" varchar NOT NULL, + "name" varchar, + "credential" text, + "tempToken" text, + "tokenExpiry" datetime, + "email" varchar NOT NULL, + "status" varchar NOT NULL, + "activeWorkspaceId" varchar NOT NULL, + "lastLogin" datetime + ); + `) + + // step 2 - migrate data + await queryRunner.query(` + INSERT 
INTO "temp_user" ("id", "role", "name", "credential", "tempToken", "tokenExpiry", "email", "status", "lastLogin", "activeWorkspaceId") + SELECT "id", "role", "name", "credential", "tempToken", "tokenExpiry", "email", "status", "lastLogin", "activeWorkspaceId" FROM "user"; + `) + + // step 3 - drop user table + await queryRunner.query(`DROP TABLE "user";`) + + // step 4 - alter temp_user to user table + await queryRunner.query(`ALTER TABLE "temp_user" RENAME TO "user";`) + + // step 1 - create temp table without workspaceId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_workspace_users" ( + "id" varchar PRIMARY KEY NOT NULL, + "workspaceId" varchar NOT NULL, + "userId" varchar NOT NULL, + "role" varchar NOT NULL + ); + `) + + // step 2 - migrate data + await queryRunner.query(` + INSERT INTO "temp_workspace_users" ("id", "workspaceId", "userId", "role") + SELECT "id", "workspaceId", "userId", "role" FROM "workspace_users"; + `) + + // step 3 - drop workspace_users table + await queryRunner.query(`DROP TABLE "workspace_users";`) + + // step 4 - alter temp_workspace_users to workspace_users table + await queryRunner.query(`ALTER TABLE "temp_workspace_users" RENAME TO "workspace_users";`) + + // step 1 - create temp table without workspaceId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_chat_flow" ( + "id" varchar PRIMARY KEY NOT NULL, + "name" varchar NOT NULL, + "flowData" text NOT NULL, + "deployed" boolean, + "isPublic" boolean, + "apikeyid" varchar, + "chatbotConfig" text, + "createdDate" datetime NOT NULL DEFAULT (datetime('now')), + "updatedDate" datetime NOT NULL DEFAULT (datetime('now')), + "apiConfig" TEXT, + "analytic" TEXT, + "category" TEXT, + "speechToText" TEXT, + "type" TEXT, + "workspaceId" TEXT, + "followUpPrompts" TEXT + ); + `) + + // step 2 - migrate data + await queryRunner.query(` + INSERT INTO "temp_chat_flow" ("id", "name", "flowData", "deployed", "isPublic", "apikeyid", "chatbotConfig", "createdDate", 
"updatedDate", "apiConfig", "analytic", "category", "speechToText", "type", "workspaceId", "followUpPrompts") + SELECT "id", "name", "flowData", "deployed", "isPublic", "apikeyid", "chatbotConfig", "createdDate", "updatedDate", "apiConfig", "analytic", "category", "speechToText", "type", "workspaceId", "followUpPrompts" FROM "chat_flow"; + `) + + // step 3 - drop chat_flow table + await queryRunner.query(`DROP TABLE "chat_flow";`) + + // step 4 - alter temp_chat_flow to chat_flow table + await queryRunner.query(`ALTER TABLE "temp_chat_flow" RENAME TO "chat_flow";`) + + // step 1 - create temp table without workspaceId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_tool" ( + "id" varchar PRIMARY KEY NOT NULL, + "name" varchar NOT NULL, + "description" text NOT NULL, + "color" varchar NOT NULL, + "iconSrc" varchar, + "schema" text, + "func" text, + "createdDate" datetime NOT NULL DEFAULT (datetime('now')), + "updatedDate" datetime NOT NULL DEFAULT (datetime('now')), + "workspaceId" TEXT + ); + `) + + // step 2 - migrate data + await queryRunner.query(` + INSERT INTO "temp_tool" ("id", "name", "description", "color", "iconSrc", "schema", "func", "createdDate", "updatedDate", "workspaceId") + SELECT "id", "name", "description", "color", "iconSrc", "schema", "func", "createdDate", "updatedDate", "workspaceId" FROM "tool"; + `) + + // step 3 - drop tool table + await queryRunner.query(`DROP TABLE "tool";`) + + // step 4 - alter temp_tool to tool table + await queryRunner.query(`ALTER TABLE "temp_tool" RENAME TO "tool";`) + + // step 1 - create temp table without workspaceId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_assistant" ( + "id" varchar PRIMARY KEY NOT NULL, + "details" text NOT NULL, + "credential" varchar NOT NULL, + "iconSrc" varchar, + "createdDate" datetime NOT NULL DEFAULT (datetime('now')), + "updatedDate" datetime NOT NULL DEFAULT (datetime('now')), + "workspaceId" TEXT + ); + `) + + // step 2 - migrate data + await 
queryRunner.query(` + INSERT INTO "temp_assistant" ("id", "details", "credential", "iconSrc", "createdDate", "updatedDate", "workspaceId") + SELECT "id", "details", "credential", "iconSrc", "createdDate", "updatedDate", "workspaceId" FROM "assistant"; + `) + + // step 3 - drop assistant table + await queryRunner.query(`DROP TABLE "assistant";`) + + // step 4 - alter temp_assistant to assistant table + await queryRunner.query(`ALTER TABLE "temp_assistant" RENAME TO "assistant";`) + + // step 1 - create temp table without workspaceId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_credential" ( + "id" varchar PRIMARY KEY NOT NULL, + "name" varchar NOT NULL, + "credentialName" varchar NOT NULL, + "encryptedData" text NOT NULL, + "createdDate" datetime NOT NULL DEFAULT (datetime('now')), + "updatedDate" datetime NOT NULL DEFAULT (datetime('now')), + "workspaceId" TEXT + ); + `) + + // step 2 - migrate data + await queryRunner.query(` + INSERT INTO "temp_credential" ("id", "name", "credentialName", "encryptedData", "createdDate", "updatedDate", "workspaceId") + SELECT "id", "name", "credentialName", "encryptedData", "createdDate", "updatedDate", "workspaceId" FROM "credential"; + `) + + // step 3 - drop credential table + await queryRunner.query(`DROP TABLE "credential";`) + + // step 4 - alter temp_credential to credential table + await queryRunner.query(`ALTER TABLE "temp_credential" RENAME TO "credential";`) + + // step 1 - create temp table without workspaceId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_document_store" ( + "id" varchar PRIMARY KEY NOT NULL, + "name" varchar NOT NULL, + "description" varchar, + "status" varchar NOT NULL, + "loaders" text, + "whereUsed" text, + "updatedDate" datetime NOT NULL DEFAULT (datetime('now')), + "createdDate" datetime NOT NULL DEFAULT (datetime('now')), + "vectorStoreConfig" TEXT, + "embeddingConfig" TEXT, + "recordManagerConfig" TEXT, + "workspaceId" TEXT + ); + `) + + // step 2 - migrate 
data + await queryRunner.query(` + INSERT INTO "temp_document_store" ("id", "name", "description", "status", "loaders", "whereUsed", "updatedDate", "createdDate", "vectorStoreConfig", "embeddingConfig", "recordManagerConfig", "workspaceId") + SELECT "id", "name", "description", "status", "loaders", "whereUsed", "updatedDate", "createdDate", "vectorStoreConfig", "embeddingConfig", "recordManagerConfig", "workspaceId" FROM "document_store"; + `) + + // step 3 - drop document_store table + await queryRunner.query(`DROP TABLE "document_store";`) + + // step 4 - alter temp_document_store to document_store table + await queryRunner.query(`ALTER TABLE "temp_document_store" RENAME TO "document_store";`) + + // step 1 - create temp table without workspaceId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_evaluation" ( + "id" varchar PRIMARY KEY NOT NULL, + "name" varchar NOT NULL, + "chatflowId" text NOT NULL, + "chatflowName" text NOT NULL, + "datasetId" varchar NOT NULL, + "datasetName" varchar NOT NULL, + "additionalConfig" text, + "status" varchar NOT NULL, + "evaluationType" varchar, + "average_metrics" text, + "runDate" datetime NOT NULL DEFAULT (datetime('now')), + "workspaceId" TEXT + ); + `) + + // step 2 - migrate data + await queryRunner.query(` + INSERT INTO "temp_evaluation" ("id", "name", "chatflowId", "chatflowName", "datasetId", "datasetName", "additionalConfig", "status", "evaluationType", "average_metrics", "runDate", "workspaceId") + SELECT "id", "name", "chatflowId", "chatflowName", "datasetId", "datasetName", "additionalConfig", "status", "evaluationType", "average_metrics", "runDate", "workspaceId" FROM "evaluation"; + `) + + // step 3 - drop evaluation table + await queryRunner.query(`DROP TABLE "evaluation";`) + + // step 4 - alter temp_evaluation to evaluation table + await queryRunner.query(`ALTER TABLE "temp_evaluation" RENAME TO "evaluation";`) + + // step 1 - create temp table without workspaceId as foreign key + await 
queryRunner.query(` + CREATE TABLE "temp_evaluator" ( + "id" varchar PRIMARY KEY NOT NULL, + "name" text NOT NULL, + "type" varchar, + "config" text, + "createdDate" datetime NOT NULL DEFAULT (datetime('now')), + "updatedDate" datetime NOT NULL DEFAULT (datetime('now')), + "workspaceId" TEXT + ); + `) + + // step 2 - migrate data + await queryRunner.query(` + INSERT INTO "temp_evaluator" ("id", "name", "type", "config", "createdDate", "updatedDate", "workspaceId") + SELECT "id", "name", "type", "config", "createdDate", "updatedDate", "workspaceId" FROM "evaluator"; + `) + + // step 3 - drop evaluator table + await queryRunner.query(`DROP TABLE "evaluator";`) + + // step 4 - alter temp_evaluator to evaluator table + await queryRunner.query(`ALTER TABLE "temp_evaluator" RENAME TO "evaluator";`) + + // step 1 - create temp table without workspaceId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_dataset" ( + "id" varchar PRIMARY KEY NOT NULL, + "name" text NOT NULL, + "description" varchar, + "createdDate" datetime NOT NULL DEFAULT (datetime('now')), + "updatedDate" datetime NOT NULL DEFAULT (datetime('now')), + "workspaceId" TEXT + ); + `) + + // step 2 - migrate data + await queryRunner.query(` + INSERT INTO "temp_dataset" ("id", "name", "description", "createdDate", "updatedDate", "workspaceId") + SELECT "id", "name", "description", "createdDate", "updatedDate", "workspaceId" FROM "dataset"; + `) + + // step 3 - drop dataset table + await queryRunner.query(`DROP TABLE "dataset";`) + + // step 4 - alter temp_dataset to dataset table + await queryRunner.query(`ALTER TABLE "temp_dataset" RENAME TO "dataset";`) + + // step 1 - create temp table without workspaceId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_variable" ( + "id" varchar PRIMARY KEY NOT NULL, + "name" text NOT NULL, + "value" text NOT NULL, + "type" varchar, + "createdDate" datetime NOT NULL DEFAULT (datetime('now')), + "updatedDate" datetime NOT NULL DEFAULT 
(datetime('now')), + "workspaceId" TEXT + ); + `) + + // step 2 - migrate data + await queryRunner.query(` + INSERT INTO "temp_variable" ("id", "name", "value", "type", "createdDate", "updatedDate", "workspaceId") + SELECT "id", "name", "value", "type", "createdDate", "updatedDate", "workspaceId" FROM "variable"; + `) + + // step 3 - drop variable table + await queryRunner.query(`DROP TABLE "variable";`) + + // step 4 - alter temp_variable to variable table + await queryRunner.query(`ALTER TABLE "temp_variable" RENAME TO "variable";`) + + // step 1 - create temp table without workspaceId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_workspace_shared" ( + "id" varchar PRIMARY KEY NOT NULL, + "workspaceId" varchar NOT NULL, + "sharedItemId" varchar NOT NULL, + "itemType" varchar NOT NULL, + "createdDate" datetime NOT NULL DEFAULT (datetime('now')), + "updatedDate" datetime NOT NULL DEFAULT (datetime('now')) + ); + `) + + // step 2 - migrate data + await queryRunner.query(` + INSERT INTO "temp_workspace_shared" ("id", "workspaceId", "sharedItemId", "itemType", "createdDate", "updatedDate") + SELECT "id", "workspaceId", "sharedItemId", "itemType", "createdDate", "updatedDate" FROM "workspace_shared"; + `) + + // step 3 - drop workspace_shared table + await queryRunner.query(`DROP TABLE "workspace_shared";`) + + // step 4 - alter temp_workspace_shared to workspace_shared table + await queryRunner.query(`ALTER TABLE "temp_workspace_shared" RENAME TO "workspace_shared";`) + + // step 1 - create temp table without workspaceId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_custom_template" ( + "id" varchar PRIMARY KEY NOT NULL, + "name" varchar NOT NULL, + "flowData" text NOT NULL, + "description" varchar, + "badge" varchar, + "framework" varchar, + "usecases" varchar, + "type" varchar, + "updatedDate" datetime NOT NULL DEFAULT (datetime('now')), + "createdDate" datetime NOT NULL DEFAULT (datetime('now')), + "workspaceId" TEXT + ); + `) + 
+ // step 2 - migrate data + await queryRunner.query(` + INSERT INTO "temp_custom_template" ("id", "name", "flowData", "description", "badge", "framework", "usecases", "type", "updatedDate", "createdDate", "workspaceId") + SELECT "id", "name", "flowData", "description", "badge", "framework", "usecases", "type", "updatedDate", "createdDate", "workspaceId" FROM "custom_template"; + `) + + // step 3 - drop custom_template table + await queryRunner.query(`DROP TABLE "custom_template";`) + + // step 4 - alter temp_custom_template to custom_template table + await queryRunner.query(`ALTER TABLE "temp_custom_template" RENAME TO "custom_template";`) + } +} diff --git a/packages/server/src/enterprise/database/migrations/sqlite/1729133111652-LinkOrganizationId.ts b/packages/server/src/enterprise/database/migrations/sqlite/1729133111652-LinkOrganizationId.ts new file mode 100644 index 000000000..c73e78cfa --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/sqlite/1729133111652-LinkOrganizationId.ts @@ -0,0 +1,61 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class LinkOrganizationId1729133111652 implements MigrationInterface { + name = 'LinkOrganizationId1729133111652' + + public async up(queryRunner: QueryRunner): Promise { + // step 1 - create temp table with organizationId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_workspace" ( + "id" varchar PRIMARY KEY NOT NULL, + "name" text NOT NULL, + "description" varchar, + "createdDate" datetime NOT NULL DEFAULT (datetime('now')), + "updatedDate" datetime NOT NULL DEFAULT (datetime('now')), + "organizationId" varchar, + FOREIGN KEY ("organizationId") REFERENCES "organization"("id") + ); + `) + + // step 2 - create index for organizationId in temp_workspace table + await queryRunner.query(`CREATE INDEX IF NOT EXISTS "idx_workspace_organizationId" ON "temp_workspace"("organizationId");`) + + // step 3 - migrate data + await queryRunner.query(` + INSERT INTO 
"temp_workspace" ("id", "name", "description", "createdDate", "updatedDate", "organizationId") + SELECT "id", "name", "description", "createdDate", "updatedDate", "organizationId" FROM "workspace"; + `) + + // step 4 - drop workspace table + await queryRunner.query(`DROP TABLE "workspace";`) + + // step 5 - alter temp_workspace to workspace table + await queryRunner.query(`ALTER TABLE "temp_workspace" RENAME TO "workspace";`) + } + + public async down(queryRunner: QueryRunner): Promise<void> { + // step 1 - create temp table without organizationId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_workspace" ( + "id" varchar PRIMARY KEY NOT NULL, + "name" text NOT NULL, + "description" varchar, + "createdDate" datetime NOT NULL DEFAULT (datetime('now')), + "updatedDate" datetime NOT NULL DEFAULT (datetime('now')), + "organizationId" varchar + ); + `) + + // step 2 - migrate data + await queryRunner.query(` + INSERT INTO "temp_workspace" ("id", "name", "description", "createdDate", "updatedDate", "organizationId") + SELECT "id", "name", "description", "createdDate", "updatedDate", "organizationId" FROM "workspace"; + `) + + // step 3 - drop workspace table + await queryRunner.query(`DROP TABLE "workspace";`) + + // step 4 - alter temp_workspace to workspace table + await queryRunner.query(`ALTER TABLE "temp_workspace" RENAME TO "workspace";`) + } +} diff --git a/packages/server/src/enterprise/database/migrations/sqlite/1730519457880-AddSSOColumns.ts b/packages/server/src/enterprise/database/migrations/sqlite/1730519457880-AddSSOColumns.ts new file mode 100644 index 000000000..da61bb87c --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/sqlite/1730519457880-AddSSOColumns.ts @@ -0,0 +1,16 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' +import { ensureColumnExists } from './sqlliteCustomFunctions' + +export class AddSSOColumns1730519457880 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise<void> { + 
await ensureColumnExists(queryRunner, 'organization', 'sso_config', 'text') + await ensureColumnExists(queryRunner, 'user', 'user_type', 'varchar') + await ensureColumnExists(queryRunner, 'login_activity', 'login_mode', 'varchar') + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE "organization" DROP COLUMN "sso_config";`) + await queryRunner.query(`ALTER TABLE "user" DROP COLUMN "user_type";`) + await queryRunner.query(`ALTER TABLE "login_activity" DROP COLUMN "login_mode";`) + } +} diff --git a/packages/server/src/enterprise/database/migrations/sqlite/1734074497540-AddPersonalWorkspace.ts b/packages/server/src/enterprise/database/migrations/sqlite/1734074497540-AddPersonalWorkspace.ts new file mode 100644 index 000000000..8e00d71b7 --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/sqlite/1734074497540-AddPersonalWorkspace.ts @@ -0,0 +1,28 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' +import { v4 as uuidv4 } from 'uuid' + +export class AddPersonalWorkspace1734074497540 implements MigrationInterface { + name = 'AddPersonalWorkspace1734074497540' + + public async up(queryRunner: QueryRunner): Promise { + const users = await queryRunner.query(`select * from "user";`) + const organization = await queryRunner.query(`select "id" from "organization";`) + for (let user of users) { + const workspaceDescription = 'Personal Workspace of ' + user.id + const workspaceId = uuidv4() + + await queryRunner.query(` + insert into "workspace" ("id", "name", "description", "organizationId") + values('${workspaceId}', 'Personal Workspace', '${workspaceDescription}', '${organization[0].id}'); + `) + + const workspaceusersId = uuidv4() + await queryRunner.query(` + insert into "workspace_users" ("id", "workspaceId", "userId", "role") + values('${workspaceusersId}', '${workspaceId}', '${user.id}', 'pw'); + `) + } + } + + public async down(): Promise {} +} diff --git 
a/packages/server/src/enterprise/database/migrations/sqlite/1737076223692-RefactorEnterpriseDatabase.ts b/packages/server/src/enterprise/database/migrations/sqlite/1737076223692-RefactorEnterpriseDatabase.ts new file mode 100644 index 000000000..0ace2040c --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/sqlite/1737076223692-RefactorEnterpriseDatabase.ts @@ -0,0 +1,476 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' +import { fixOpenSourceAssistantTable } from '../../../../database/migrations/sqlite/1743758056188-FixOpenSourceAssistantTable' +import { decrypt, encrypt } from '../../../utils/encryption.util' +import { LoginMethodStatus } from '../../entities/login-method.entity' +import { OrganizationUserStatus } from '../../entities/organization-user.entity' +import { OrganizationName } from '../../entities/organization.entity' +import { GeneralRole } from '../../entities/role.entity' +import { UserStatus } from '../../entities/user.entity' +import { WorkspaceUserStatus } from '../../entities/workspace-user.entity' +import { WorkspaceName } from '../../entities/workspace.entity' +import { linkWorkspaceId } from './1729130948686-LinkWorkspaceId' + +export class RefactorEnterpriseDatabase1737076223692 implements MigrationInterface { + name = 'RefactorEnterpriseDatabase1737076223692' + + private async modifyTable(queryRunner: QueryRunner): Promise { + /*------------------------------------- + --------------- user ----------------- + --------------------------------------*/ + // rename user table to temp_user + await queryRunner.query(`alter table "user" rename to "temp_user";`) + + // create user table + await queryRunner.query(` + create table "user" ( + "id" uuid default (lower(substr(hex(randomblob(16)), 1, 8) || '-' || substr(hex(randomblob(16)), 9, 4) || '-' || substr('1' || substr(hex(randomblob(16)), 9, 3), 1, 4) || '-' || substr('8' || substr(hex(randomblob(16)), 13, 3), 1, 4) || '-' || substr(hex(randomblob(16)), 17, 12))) 
primary key, + "name" varchar(100) not null, + "email" varchar(255) not null unique, + "credential" text null, + "tempToken" text null, + "tokenExpiry" timestamp null, + "status" varchar(20) default '${UserStatus.UNVERIFIED}' not null, + "createdDate" timestamp default current_timestamp not null, + "updatedDate" timestamp default current_timestamp not null, + "createdBy" uuid not null, + "updatedBy" uuid not null, + constraint "fk_createdBy" foreign key ("createdBy") references "user" ("id"), + constraint "fk_updatedBy" foreign key ("updatedBy") references "user" ("id") + ); + `) + + /*------------------------------------- + ----------- organization -------------- + --------------------------------------*/ + // rename organization table to temp_organization + await queryRunner.query(`alter table "organization" rename to "temp_organization";`) + + // create organization table + await queryRunner.query(` + create table "organization" ( + "id" uuid default (lower(substr(hex(randomblob(16)), 1, 8) || '-' || substr(hex(randomblob(16)), 9, 4) || '-' || substr('1' || substr(hex(randomblob(16)), 9, 3), 1, 4) || '-' || substr('8' || substr(hex(randomblob(16)), 13, 3), 1, 4) || '-' || substr(hex(randomblob(16)), 17, 12))) primary key, + "name" varchar(100) default '${OrganizationName.DEFAULT_ORGANIZATION}' not null, + "customerId" varchar(100) null, + "subscriptionId" varchar(100) null, + "createdDate" timestamp default current_timestamp not null, + "updatedDate" timestamp default current_timestamp not null, + "createdBy" uuid not null, + "updatedBy" uuid not null, + constraint "fk_createdBy" foreign key ("createdBy") references "user" ("id"), + constraint "fk_updatedBy" foreign key ("updatedBy") references "user" ("id") + ); + `) + + /*------------------------------------- + ----------- login method -------------- + --------------------------------------*/ + // create login_method table + await queryRunner.query(` + create table "login_method" ( + "id" uuid default 
(lower(substr(hex(randomblob(16)), 1, 8) || '-' || substr(hex(randomblob(16)), 9, 4) || '-' || substr('1' || substr(hex(randomblob(16)), 9, 3), 1, 4) || '-' || substr('8' || substr(hex(randomblob(16)), 13, 3), 1, 4) || '-' || substr(hex(randomblob(16)), 17, 12))) primary key, + "organizationId" uuid null, + "name" varchar(100) not null, + "config" text not null, + "status" varchar(20) default '${LoginMethodStatus.ENABLE}' not null, + "createdDate" timestamp default current_timestamp not null, + "updatedDate" timestamp default current_timestamp not null, + "createdBy" uuid null, + "updatedBy" uuid null, + constraint "fk_organizationId" foreign key ("organizationId") references "organization" ("id"), + constraint "fk_createdBy" foreign key ("createdBy") references "user" ("id"), + constraint "fk_updatedBy" foreign key ("updatedBy") references "user" ("id") + ); + `) + + /*------------------------------------- + --------------- role ------------------ + --------------------------------------*/ + // rename roles table to temp_role + await queryRunner.query(`alter table "roles" rename to "temp_role";`) + + // create organization_login_method table + await queryRunner.query(` + create table "role" ( + "id" uuid default (lower(substr(hex(randomblob(16)), 1, 8) || '-' || substr(hex(randomblob(16)), 9, 4) || '-' || substr('1' || substr(hex(randomblob(16)), 9, 3), 1, 4) || '-' || substr('8' || substr(hex(randomblob(16)), 13, 3), 1, 4) || '-' || substr(hex(randomblob(16)), 17, 12))) primary key, + "organizationId" uuid null, + "name" varchar(100) not null, + "description" text null, + "permissions" text not null, + "createdDate" timestamp default current_timestamp not null, + "updatedDate" timestamp default current_timestamp not null, + "createdBy" uuid null, + "updatedBy" uuid null, + constraint "fk_organizationId" foreign key ("organizationId") references "organization" ("id"), + constraint "fk_createdBy" foreign key ("createdBy") references "user" ("id"), + constraint 
"fk_updatedBy" foreign key ("updatedBy") references "user" ("id") + ); + `) + + /*------------------------------------- + ---------- organization_user ---------- + --------------------------------------*/ + // create organization_user table + await queryRunner.query(` + create table "organization_user" ( + "organizationId" uuid not null, + "userId" uuid not null, + "roleId" uuid not null, + "status" varchar(20) default '${OrganizationUserStatus.ACTIVE}' not null, + "createdDate" timestamp default current_timestamp not null, + "updatedDate" timestamp default current_timestamp not null, + "createdBy" uuid not null, + "updatedBy" uuid not null, + constraint "pk_organization_user" primary key ("organizationId", "userId"), + constraint "fk_organizationId" foreign key ("organizationId") references "organization" ("id"), + constraint "fk_userId" foreign key ("userId") references "user" ("id"), + constraint "fk_roleId" foreign key ("roleId") references "role" ("id"), + constraint "fk_createdBy" foreign key ("createdBy") references "user" ("id"), + constraint "fk_updatedBy" foreign key ("updatedBy") references "user" ("id") + ); + `) + + /*------------------------------------- + ------------- workspace --------------- + --------------------------------------*/ + // rename workspace table to temp_workspace + await queryRunner.query(`alter table "workspace" rename to "temp_workspace";`) + + // create workspace table + await queryRunner.query(` + create table "workspace" ( + "id" uuid default (lower(substr(hex(randomblob(16)), 1, 8) || '-' || substr(hex(randomblob(16)), 9, 4) || '-' || substr('1' || substr(hex(randomblob(16)), 9, 3), 1, 4) || '-' || substr('8' || substr(hex(randomblob(16)), 13, 3), 1, 4) || '-' || substr(hex(randomblob(16)), 17, 12))) primary key, + "name" varchar(100) not null, + "description" text null, + "createdDate" timestamp default current_timestamp not null, + "updatedDate" timestamp default current_timestamp not null, + "organizationId" uuid not null, 
+ "createdBy" uuid not null, + "updatedBy" uuid not null, + constraint "fk_organizationId" foreign key ("organizationId") references "organization" ("id"), + constraint "fk_createdBy" foreign key ("createdBy") references "user" ("id"), + constraint "fk_updatedBy" foreign key ("updatedBy") references "user" ("id") + ); + `) + + /*------------------------------------- + ----------- workspace_user ------------ + --------------------------------------*/ + // rename workspace_users table to temp_workspace_user + await queryRunner.query(`alter table "workspace_users" rename to "temp_workspace_user";`) + + // create workspace_user table + await queryRunner.query(` + create table "workspace_user" ( + "workspaceId" uuid not null, + "userId" uuid not null, + "roleId" uuid not null, + "status" varchar(20) default '${WorkspaceUserStatus.INVITED}' not null, + "lastLogin" timestamp null, + "createdDate" timestamp default current_timestamp not null, + "updatedDate" timestamp default current_timestamp not null, + "createdBy" uuid not null, + "updatedBy" uuid not null, + constraint "pk_workspace_user" primary key ("workspaceId", "userId"), + constraint "fk_workspaceId" foreign key ("workspaceId") references "workspace" ("id"), + constraint "fk_userId" foreign key ("userId") references "user" ("id"), + constraint "fk_roleId" foreign key ("roleId") references "role" ("id"), + constraint "fk_createdBy" foreign key ("createdBy") references "user" ("id"), + constraint "fk_updatedBy" foreign key ("updatedBy") references "user" ("id") + ); + `) + } + + private async deleteWorkspaceWithoutUser(queryRunner: QueryRunner) { + const workspaceWithoutUser = await queryRunner.query(` + select w."id" as "id" from "workspace_user" as "wu" + right join "workspace" as "w" on "wu"."workspaceId" = "w"."id" + where "wu"."userId" is null; + `) + const workspaceIds = workspaceWithoutUser.map((workspace: { id: string }) => `'${workspace.id}'`).join(',') + + // Delete related records from other tables that 
reference the deleted workspaces + if (workspaceIds && workspaceIds.length > 0) { + await queryRunner.query(` + delete from "workspace_user" where "workspaceId" in (${workspaceIds}); + `) + await queryRunner.query(` + delete from "apikey" where "workspaceId" in (${workspaceIds}); + `) + await queryRunner.query(` + delete from "assistant" where "workspaceId" in (${workspaceIds}); + `) + const chatflows = await queryRunner.query(` + select id from "chat_flow" where "workspaceId" in (${workspaceIds}); + `) + const chatflowIds = chatflows.map((chatflow: { id: string }) => `'${chatflow.id}'`).join(',') + if (chatflowIds && chatflowIds.length > 0) { + await queryRunner.query(` + delete from "chat_flow" where "workspaceId" in (${workspaceIds}); + `) + await queryRunner.query(` + delete from "upsert_history" where "chatflowid" in (${chatflowIds}); + `) + await queryRunner.query(` + delete from "chat_message" where "chatflowid" in (${chatflowIds}); + `) + await queryRunner.query(` + delete from "chat_message_feedback" where "chatflowid" in (${chatflowIds}); + `) + } + await queryRunner.query(` + delete from "credential" where "workspaceId" in (${workspaceIds}); + `) + await queryRunner.query(` + delete from "custom_template" where "workspaceId" in (${workspaceIds}); + `) + const datasets = await queryRunner.query(` + select id from "dataset" where "workspaceId" in (${workspaceIds}); + `) + const datasetIds = datasets.map((dataset: { id: string }) => `'${dataset.id}'`).join(',') + if (datasetIds && datasetIds.length > 0) { + await queryRunner.query(` + delete from "dataset" where "workspaceId" in (${workspaceIds}); + `) + await queryRunner.query(` + delete from "dataset_row" where "datasetId" in (${datasetIds}); + `) + } + const documentStores = await queryRunner.query(` + select id from "document_store" where "workspaceId" in (${workspaceIds}); + `) + const documentStoreIds = documentStores.map((documentStore: { id: string }) => `'${documentStore.id}'`).join(',') + if 
(documentStoreIds && documentStoreIds.length > 0) { + await queryRunner.query(` + delete from "document_store" where "workspaceId" in (${workspaceIds}); + `) + await queryRunner.query(` + delete from "document_store_file_chunk" where "storeId" in (${documentStoreIds}); + `) + } + const evaluations = await queryRunner.query(` + select id from "evaluation" where "workspaceId" in (${workspaceIds}); + `) + const evaluationIds = evaluations.map((evaluation: { id: string }) => `'${evaluation.id}'`).join(',') + if (evaluationIds && evaluationIds.length > 0) { + await queryRunner.query(` + delete from "evaluation" where "workspaceId" in (${workspaceIds}); + `) + await queryRunner.query(` + delete from "evaluation_run" where "evaluationId" in (${evaluationIds}); + `) + } + await queryRunner.query(` + delete from "evaluator" where "workspaceId" in (${workspaceIds}); + `) + await queryRunner.query(` + delete from "tool" where "workspaceId" in (${workspaceIds}); + `) + await queryRunner.query(` + delete from "variable" where "workspaceId" in (${workspaceIds}); + `) + await queryRunner.query(` + delete from "workspace_shared" where "workspaceId" in (${workspaceIds}); + `) + await queryRunner.query(` + delete from "workspace" where "id" in (${workspaceIds}); + `) + } + } + + private async populateTable(queryRunner: QueryRunner): Promise { + // insert generalRole + const generalRole = [ + { + name: 'owner', + description: 'Has full control over the organization.', + permissions: '["organization","workspace"]' + }, + { + name: 'member', + description: 'Has limited control over the organization.', + permissions: '[]' + }, + { + name: 'personal workspace', + description: 'Has full control over the personal workspace', + permissions: + '[ "chatflows:view", "chatflows:create", "chatflows:update", "chatflows:duplicate", "chatflows:delete", "chatflows:export", "chatflows:import", "chatflows:config", "chatflows:domains", "agentflows:view", "agentflows:create", "agentflows:update", 
"agentflows:duplicate", "agentflows:delete", "agentflows:export", "agentflows:import", "agentflows:config", "agentflows:domains", "tools:view", "tools:create", "tools:update", "tools:delete", "tools:export", "assistants:view", "assistants:create", "assistants:update", "assistants:delete", "credentials:view", "credentials:create", "credentials:update", "credentials:delete", "credentials:share", "variables:view", "variables:create", "variables:update", "variables:delete", "apikeys:view", "apikeys:create", "apikeys:update", "apikeys:delete", "apikeys:import", "documentStores:view", "documentStores:create", "documentStores:update", "documentStores:delete", "documentStores:add-loader", "documentStores:delete-loader", "documentStores:preview-process", "documentStores:upsert-config", "datasets:view", "datasets:create", "datasets:update", "datasets:delete", "evaluators:view", "evaluators:create", "evaluators:update", "evaluators:delete", "evaluations:view", "evaluations:create", "evaluations:update", "evaluations:delete", "evaluations:run", "templates:marketplace", "templates:custom", "templates:custom-delete", "templates:toolexport", "templates:flowexport", "templates:custom-share", "workspace:export", "workspace:import", "executions:view", "executions:delete" ]' + } + ] + for (let role of generalRole) { + await queryRunner.query(` + insert into "role"("name", "description", "permissions") + values('${role.name}', '${role.description}', '${role.permissions}'); + `) + } + + const users = await queryRunner.query('select * from "temp_user";') + const noExistingData = users.length > 0 === false + if (noExistingData) return + + const organizations = await queryRunner.query('select * from "temp_organization";') + const organizationId = organizations[0].id + const adminUserId = organizations[0].adminUserId + const ssoConfig = organizations[0].sso_config ? 
JSON.parse(await decrypt(organizations[0].sso_config)).providers : [] + + /*------------------------------------- + --------------- user ----------------- + --------------------------------------*/ + // insert user with temp_user data + await queryRunner.query(` + insert into "user" ("id", "name", "email", "credential", "tempToken", "tokenExpiry", "status", "createdBy", "updatedBy") + select tu."id", coalesce(tu."name", tu."email"), tu."email", tu."credential", tu."tempToken", tu."tokenExpiry", tu."status", + '${adminUserId}', '${adminUserId}' + from "temp_user" as "tu"; + `) + + /*------------------------------------- + ----------- organization -------------- + --------------------------------------*/ + // insert organization with temp_organization data + await queryRunner.query(` + insert into "organization" ("id", "name", "createdBy", "updatedBy") + select "id", "name", "adminUserId", "adminUserId" from "temp_organization"; + `) + + /*------------------------------------- + ----------- login method -------------- + --------------------------------------*/ + // insert login_method with temp_organization data + for (let config of ssoConfig) { + const newConfigFormat = { + domain: config.domain === '' || config.domain === undefined ? undefined : config.domain, + tenantID: config.tenantID === '' || config.tenantID === undefined ? undefined : config.tenantID, + clientID: config.clientID === '' || config.clientID === undefined ? undefined : config.clientID, + clientSecret: config.clientSecret === '' || config.clientSecret === undefined ? undefined : config.clientSecret + } + const status = config.configEnabled === true ? 
LoginMethodStatus.ENABLE : LoginMethodStatus.DISABLE + + const allUndefined = Object.values(newConfigFormat).every((value) => value === undefined) + if (allUndefined && status === LoginMethodStatus.DISABLE) continue + const encryptData = await encrypt(JSON.stringify(newConfigFormat)) + + await queryRunner.query(` + insert into "login_method" ("organizationId", "name", "config", "status", "createdBy", "updatedBy") + values('${organizationId}','${config.providerName}','${encryptData}','${status}','${adminUserId}','${adminUserId}'); + `) + } + + /*------------------------------------- + --------------- role ------------------ + --------------------------------------*/ + // insert workspace role into role + const workspaceRole = await queryRunner.query(`select "id", "name", "description", "permissions" from "temp_role";`) + for (let role of workspaceRole) { + role.permissions = JSON.stringify(role.permissions.split(',').filter((permission: string) => permission.trim() !== '')) + const haveDescriptionQuery = `insert into "role" ("id", "organizationId", "name", "description", "permissions", "createdBy", "updatedBy") + values('${role.id}','${organizationId}','${role.name}','${role.description}','${role.permissions}','${adminUserId}','${adminUserId}');` + const noHaveDescriptionQuery = `insert into "role" ("id", "organizationId", "name", "permissions", "createdBy", "updatedBy") + values('${role.id}','${organizationId}','${role.name}','${role.permissions}','${adminUserId}','${adminUserId}');` + const insertRoleQuery = role.description ? haveDescriptionQuery : noHaveDescriptionQuery + await queryRunner.query(insertRoleQuery) + } + + /*------------------------------------- + ---------- organization_user ---------- + --------------------------------------*/ + const roles = await queryRunner.query('select * from "role";') + // insert organization_user with user, role and temp_organization data + for (let user of users) { + const roleId = + user.id === adminUserId + ? 
roles.find((role: any) => role.name === GeneralRole.OWNER).id + : roles.find((role: any) => role.name === GeneralRole.MEMBER).id + await queryRunner.query(` + insert into "organization_user" ("organizationId", "userId", "roleId", "status", "createdBy", "updatedBy") + values ('${organizationId}','${user.id}','${roleId}','${user.status}','${adminUserId}','${adminUserId}'); + `) + } + + /*------------------------------------- + ------------- workspace --------------- + --------------------------------------*/ + // for (let workspace of workspaces) { + // await queryRunner.query( + // `update "workspace" set "createdBy" = '${adminUserId}', "updatedBy" = '${adminUserId}' where "id" = '${workspace.id}';` + // ) + // } + + await queryRunner.query(` + insert into "workspace" ("id", "name", "description", "createdDate", "updatedDate", "organizationId", "createdBy", "updatedBy") + select "id", "name", "description", "createdDate", "updatedDate", "organizationId", '${adminUserId}', '${adminUserId}' from "temp_workspace"; + `) + + /*------------------------------------- + ----------- workspace_user ------------ + --------------------------------------*/ + const workspaces = await queryRunner.query('select * from "workspace";') + const workspaceUsers = await queryRunner.query('select * from "temp_workspace_user";') + for (let workspaceUser of workspaceUsers) { + switch (workspaceUser.role) { + case 'org_admin': + workspaceUser.role = roles.find((role: any) => role.name === GeneralRole.OWNER).id + break + case 'pw': + workspaceUser.role = roles.find((role: any) => role.name === GeneralRole.PERSONAL_WORKSPACE).id + break + default: + workspaceUser.role = roles.find((role: any) => role.name === workspaceUser.role).id + break + } + const user = users.find((user: any) => user.id === workspaceUser.userId) + const workspace = workspaces.find((workspace: any) => workspace.id === workspaceUser.workspaceId) + if (workspaceUser.workspaceId === user.activeWorkspaceId && user.lastLogin && 
user.status !== UserStatus.INVITED) { + const lastLogin = new Date(user.lastLogin).toISOString() + await queryRunner.query(` + insert into "workspace_user" ("workspaceId", "userId", "roleId", "status", "lastLogin","createdBy", "updatedBy") + values ('${workspaceUser.workspaceId}','${workspaceUser.userId}','${workspaceUser.role}','${WorkspaceUserStatus.ACTIVE}','${lastLogin}','${adminUserId}','${adminUserId}'); + `) + } else if (workspace.name === WorkspaceName.DEFAULT_PERSONAL_WORKSPACE && user.status === UserStatus.INVITED) { + // Skip personal workspaces for users who haven't signed up yet to avoid duplicates when they sign up. + // account.service.ts creates personal workspace during sign-up. + await queryRunner.query(` + delete from "temp_workspace_user" where "workspaceId" = '${workspaceUser.workspaceId}' and "userId" = '${workspaceUser.userId}'; + `) + await queryRunner.query(` + delete from "workspace" where "id" = '${workspaceUser.workspaceId}'; + `) + } else { + await queryRunner.query(` + insert into "workspace_user" ("workspaceId", "userId", "roleId", "status","createdBy", "updatedBy") + values ('${workspaceUser.workspaceId}','${workspaceUser.userId}','${workspaceUser.role}','${WorkspaceUserStatus.INVITED}','${adminUserId}','${adminUserId}'); + `) + } + } + + await this.deleteWorkspaceWithoutUser(queryRunner) + } + + private async deleteTempTable(queryRunner: QueryRunner): Promise { + await queryRunner.query(` + drop table "temp_workspace_user"; + `) + await queryRunner.query(` + drop table "temp_role"; + `) + await queryRunner.query(` + drop table "temp_organization"; + `) + await queryRunner.query(` + drop table "temp_user"; + `) + await queryRunner.query(` + drop table "temp_workspace"; + `) + } + + public async up(queryRunner: QueryRunner): Promise { + await this.modifyTable(queryRunner) + await this.populateTable(queryRunner) + await this.deleteTempTable(queryRunner) + await linkWorkspaceId(queryRunner, false) + await 
fixOpenSourceAssistantTable(queryRunner) + } + + public async down(): Promise {} +} diff --git a/packages/server/src/enterprise/database/migrations/sqlite/1746862866554-ExecutionLinkWorkspaceId.ts b/packages/server/src/enterprise/database/migrations/sqlite/1746862866554-ExecutionLinkWorkspaceId.ts new file mode 100644 index 000000000..4cba459de --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/sqlite/1746862866554-ExecutionLinkWorkspaceId.ts @@ -0,0 +1,73 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' +import { ensureColumnExists } from './sqlliteCustomFunctions' + +export class ExecutionLinkWorkspaceId1746862866554 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await ensureColumnExists(queryRunner, 'execution', 'workspaceId', 'TEXT') + + // step 1 - create temp table with workspaceId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_execution" ( + "id" varchar PRIMARY KEY NOT NULL, + "executionData" text NOT NULL, + "action" text, + "state" varchar NOT NULL, + "agentflowId" varchar NOT NULL, + "sessionId" varchar NOT NULL, + "isPublic" boolean, + "createdDate" datetime NOT NULL DEFAULT (datetime('now')), + "updatedDate" datetime NOT NULL DEFAULT (datetime('now')), + "stoppedDate" datetime, + "workspaceId" varchar, + FOREIGN KEY ("workspaceId") REFERENCES "workspace"("id") + ); + `) + + // step 2 - create index for workspaceId in temp_execution table + await queryRunner.query(`CREATE INDEX IF NOT EXISTS "idx_execution_workspaceId" ON "temp_execution"("workspaceId");`) + + // step 3 - migrate data + await queryRunner.query(` + INSERT INTO "temp_execution" ("id", "executionData", "action", "state", "agentflowId", "sessionId", "isPublic", "createdDate", "updatedDate", "stoppedDate") + SELECT "id", "executionData", "action", "state", "agentflowId", "sessionId", "isPublic", "createdDate", "updatedDate", "stoppedDate" FROM "execution"; + `) + + // step 4 - drop execution table + 
await queryRunner.query(`DROP TABLE "execution";`) + + // step 5 - alter temp_execution to execution table + await queryRunner.query(`ALTER TABLE "temp_execution" RENAME TO "execution";`) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE "execution" DROP COLUMN "workspaceId";`) + + // step 1 - create temp table without workspaceId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_execution" ( + "id" varchar PRIMARY KEY NOT NULL, + "executionData" text NOT NULL, + "action" text, + "state" varchar NOT NULL, + "agentflowId" varchar NOT NULL, + "sessionId" varchar NOT NULL, + "isPublic" boolean, + "createdDate" datetime NOT NULL DEFAULT (datetime('now')), + "updatedDate" datetime NOT NULL DEFAULT (datetime('now')), + "stoppedDate" datetime + ); + `) + + // step 2 - migrate data + await queryRunner.query(` + INSERT INTO "temp_execution" ("id", "executionData", "action", "state", "agentflowId", "sessionId", "isPublic", "createdDate", "updatedDate", "stoppedDate") + SELECT "id", "executionData", "action", "state", "agentflowId", "sessionId", "isPublic", "createdDate", "updatedDate", "stoppedDate" FROM "execution"; + `) + + // step 3 - drop execution table + await queryRunner.query(`DROP TABLE "execution";`) + + // step 4 - alter temp_execution to execution table + await queryRunner.query(`ALTER TABLE "temp_execution" RENAME TO "execution";`) + } +} diff --git a/packages/server/src/enterprise/database/migrations/sqlite/sqlliteCustomFunctions.ts b/packages/server/src/enterprise/database/migrations/sqlite/sqlliteCustomFunctions.ts new file mode 100644 index 000000000..b21546f6f --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/sqlite/sqlliteCustomFunctions.ts @@ -0,0 +1,20 @@ +import { QueryRunner } from 'typeorm' + +export const ensureColumnExists = async ( + queryRunner: QueryRunner, + tableName: string, + columnName: string, + columnType: string // Accept column type as a parameter +): 
Promise => { + // Retrieve column information from the specified table + const columns = await queryRunner.query(`PRAGMA table_info(${tableName});`) + + // Check if the specified column exists + const columnExists = columns.some((col: any) => col.name === columnName) + + // Check if the specified column exists in the returned columns + if (!columnExists) { + // Add the column if it does not exist + await queryRunner.query(`ALTER TABLE ${tableName} ADD COLUMN ${columnName} ${columnType};`) + } +} diff --git a/packages/server/src/enterprise/emails/verify_email_cloud.hbs b/packages/server/src/enterprise/emails/verify_email_cloud.hbs new file mode 100644 index 000000000..98b276e72 --- /dev/null +++ b/packages/server/src/enterprise/emails/verify_email_cloud.hbs @@ -0,0 +1,1157 @@ + + + + FlowiseAI + + + + + + + + + + + + + + + + + + + + + +
+ + + + + +
+ + + + + + +
+ +
+ + + + + + +
+ + + + + + + + + +
+ + + + + + +
+ +
+
+
+
+ + +
+
+ + + + + + + + +
+ + + + + + +
+ +
+ + + + + + +
+ + + + + + + + + + + + + + + + +
+
+ Please confirm your email +
+
+
+ Hi there! ๐Ÿ‘‹, +

+ Welcome to FlowiseAI

+ To complete your registration, we need to verify your email address.

+
+
+
+
+ + +
+
+ + + + + + + + +
+ + + + + + +
+ +
+ + + + + + +
+ + + + + + +
+ + + + + + +
+ + Verify Email Address + +
+
+
+
+ + +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + +
+ +
+ + + + + + +
+ + + + + + +
+
+

+ The FlowiseAI Team +
+
+
+
+ + +
+
+ + + + + + + + + + + + + + +
+ + + + + + +
+ +
+ +
+ + + + + + +
+ + + + + + +
+ + + + + + +
+ + + + + +
+
+
+
+ + +
+ + + + + + +
+ + + + + + +
+
+ ย  +
+
+
+
+ + +
+ + + + + + +
+ + + + + + +
+ + + + + + +
+ + + + + + +
+
+
+
+ + +
+ + +
+
+ + + + +
+ + \ No newline at end of file diff --git a/packages/server/src/enterprise/emails/verify_email_cloud.html b/packages/server/src/enterprise/emails/verify_email_cloud.html new file mode 100644 index 000000000..a8e075f06 --- /dev/null +++ b/packages/server/src/enterprise/emails/verify_email_cloud.html @@ -0,0 +1,1274 @@ + + + + FlowiseAI + + + + + + + + + + + + + + + + + + + + + +
+ + + + + +
+ + + + + + +
+ +
+ + + + + + +
+ + + + + + + + + +
+ + + + + + +
+ +
+
+
+
+ + +
+
+ + + + + + + + +
+ + + + + + +
+ +
+ + + + + + +
+ + + + + + + + + + + + + + + + +
+
+ Please confirm your email +
+
+
+ Hi there! ๐Ÿ‘‹,

+ Welcome to FlowiseAI

+ To complete your registration, we need to verify your email + address.

+
+
+
+
+ + +
+
+ + + + + + + + +
+ + + + + + +
+ +
+ + + + + + +
+ + + + + + +
+ + + + + + +
+ + Verify Email Address + +
+
+
+
+ + +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + +
+ +
+ + + + + + +
+ + + + + + +
+
+

+ The FlowiseAI Team +
+
+
+
+ + +
+
+ + + + + + + + + + + + + + +
+ + + + + + +
+ +
+ +
+ + + + + + +
+ + + + + + +
+ + + + + + +
+ + + + + +
+
+
+
+ + +
+ + + + + + +
+ + + + + + +
+
+ ย  +
+
+
+
+ + +
+ + + + + + +
+ + + + + + +
+ + + + + + +
+ + + + + + +
+
+
+
+ + +
+ + +
+
+ + + + +
+ + diff --git a/packages/server/src/enterprise/emails/workspace_add_cloud.hbs b/packages/server/src/enterprise/emails/workspace_add_cloud.hbs new file mode 100644 index 000000000..3dce577ed --- /dev/null +++ b/packages/server/src/enterprise/emails/workspace_add_cloud.hbs @@ -0,0 +1,1165 @@ + + + FlowiseAI + + + + + + + + + + + + + + + + + + + + + +
+ + + + + +
+ + + + + + +
+ +
+ + + + + + +
+ + + + + + + + + +
+ + + + + + +
+ +
+
+
+
+ + +
+
+ + + + + + + + +
+ + + + + + +
+ +
+ + + + + + +
+ + + + + + + + + + + + + + + + +
+
+ You've been added +
to the + {{workspaceName}} + workspace. +
+
+
+ Hi there! ๐Ÿ‘‹, +

+ An administrator added you to their + {{workspaceName}} + workspace.

To get started: +
    +
  1. + Click the button below to go to your FlowiseAI dashboard +
  2. +
  3. You'll get immediate access to the workspace
  4. +
+
+
+
+
+ + +
+
+ + + + + + + + +
+ + + + + + +
+ +
+ + + + + + +
+ + + + + + +
+ + + + + + +
+ + Go to dashboard + +
+
+
+
+ + +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + +
+ +
+ + + + + + +
+ + + + + + +
+
+

+ The FlowiseAI Team +
+
+
+
+ + +
+
+ + + + + + + + + + + + + + +
+ + + + + + +
+ +
+ +
+ + + + + + +
+ + + + + + +
+ + + + + + +
+ + + + + +
+
+
+
+ + +
+ + + + + + +
+ + + + + + +
+
+ ย  +
+
+
+
+ + +
+ + + + + + +
+ + + + + + +
+ + + + + + +
+ + + + + + +
+
+
+
+ + +
+ + +
+
+ + + + +
+ + \ No newline at end of file diff --git a/packages/server/src/enterprise/emails/workspace_add_cloud.html b/packages/server/src/enterprise/emails/workspace_add_cloud.html new file mode 100644 index 000000000..9c30212b2 --- /dev/null +++ b/packages/server/src/enterprise/emails/workspace_add_cloud.html @@ -0,0 +1,1279 @@ + + + + FlowiseAI + + + + + + + + + + + + + + + + + + + + + +
+ + + + + +
+ + + + + + +
+ +
+ + + + + + +
+ + + + + + + + + +
+ + + + + + +
+ +
+
+
+
+ + +
+
+ + + + + + + + +
+ + + + + + +
+ +
+ + + + + + +
+ + + + + + + + + + + + + + + + +
+
+ You've been added
to the {{workspaceName}} workspace. +
+
+
+ Hi there! ๐Ÿ‘‹,

+ An administrator added you to their {{workspaceName}} workspace.

To + get started: +
    +
  1. + Click the button below to go to your FlowiseAI dashboard +
  2. +
  3. You'll get immediate access to the workspace
  4. +
+
+
+
+
+ + +
+
+ + + + + + + + +
+ + + + + + +
+ +
+ + + + + + +
+ + + + + + +
+ + + + + + +
+ + Go to dashboard + +
+
+
+
+ + +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + +
+ +
+ + + + + + +
+ + + + + + +
+
+

+ The FlowiseAI Team +
+
+
+
+ + +
+
+ + + + + + + + + + + + + + +
+ + + + + + +
+ +
+ +
+ + + + + + +
+ + + + + + +
+ + + + + + +
+ + + + + +
+
+
+
+ + +
+ + + + + + +
+ + + + + + +
+
+ ย  +
+
+
+
+ + +
+ + + + + + +
+ + + + + + +
+ + + + + + +
+ + + + + + +
+
+
+
+ + +
+ + +
+
+ + + + +
+ + diff --git a/packages/server/src/enterprise/emails/workspace_new_invite_cloud.hbs b/packages/server/src/enterprise/emails/workspace_new_invite_cloud.hbs new file mode 100644 index 000000000..b536d147b --- /dev/null +++ b/packages/server/src/enterprise/emails/workspace_new_invite_cloud.hbs @@ -0,0 +1,1163 @@ + + + FlowiseAI + + + + + + + + + + + + + + + + + + + + + +
+ + + + + +
+ + + + + + +
+ +
+ + + + + + +
+ + + + + + + + + +
+ + + + + + +
+ +
+
+
+
+ + +
+
+ + + + + + + + +
+ + + + + + +
+ +
+ + + + + + +
+ + + + + + + + + + + + + + + + +
+
+ You've been invited +
to the + {{workspaceName}} + workspace. +
+
+
+ Hi there! ๐Ÿ‘‹, +

+ An administrator invited you to join their + {{workspaceName}} + workspace.

To get started: +
    +
  1. Click the button below to create an account
  2. +
  3. You'll get immediate access to the workspace
  4. +
+
+
+
+
+ + +
+
+ + + + + + + + +
+ + + + + + +
+ +
+ + + + + + +
+ + + + + + +
+ + + + + + +
+ + Sign up for free + +
+
+
+
+ + +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + +
+ +
+ + + + + + +
+ + + + + + +
+
+

+ The FlowiseAI Team +
+
+
+
+ + +
+
+ + + + + + + + + + + + + + +
+ + + + + + +
+ +
+ +
+ + + + + + +
+ + + + + + +
+ + + + + + +
+ + + + + +
+
+
+
+ + +
+ + + + + + +
+ + + + + + +
+
+ ย  +
+
+
+
+ + +
+ + + + + + +
+ + + + + + +
+ + + + + + +
+ + + + + + +
+
+
+
+ + +
+ + +
+
+ + + + +
+ + \ No newline at end of file diff --git a/packages/server/src/enterprise/emails/workspace_new_invite_cloud.html b/packages/server/src/enterprise/emails/workspace_new_invite_cloud.html new file mode 100644 index 000000000..7a82d2f72 --- /dev/null +++ b/packages/server/src/enterprise/emails/workspace_new_invite_cloud.html @@ -0,0 +1,1277 @@ + + + + FlowiseAI + + + + + + + + + + + + + + + + + + + + + +
+ + + + + +
+ + + + + + +
+ +
+ + + + + + +
+ + + + + + + + + +
+ + + + + + +
+ +
+
+
+
+ + +
+
+ + + + + + + + +
+ + + + + + +
+ +
+ + + + + + +
+ + + + + + + + + + + + + + + + +
+
+ You've been invited
to the {{workspaceName}} workspace. +
+
+
+ Hi there! ๐Ÿ‘‹,

+ An administrator invited you to join their {{workspaceName}} + workspace.

To get started: +
    +
  1. Click the button below to create an account
  2. +
  3. You'll get immediate access to the workspace
  4. +
+
+
+
+
+ + +
+
+ + + + + + + + +
+ + + + + + +
+ +
+ + + + + + +
+ + + + + + +
+ + + + + + +
+ + Sign up for free + +
+
+
+
+ + +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + +
+ +
+ + + + + + +
+ + + + + + +
+
+

+ The FlowiseAI Team +
+
+
+
+ + +
+
+ + + + + + + + + + + + + + +
+ + + + + + +
+ +
+ +
+ + + + + + +
+ + + + + + +
+ + + + + + +
+ + + + + +
+
+
+
+ + +
+ + + + + + +
+ + + + + + +
+
+ ย  +
+
+
+
+ + +
+ + + + + + +
+ + + + + + +
+ + + + + + +
+ + + + + + +
+
+
+
+ + +
+ + +
+
+ + + + +
+ + diff --git a/packages/server/src/enterprise/emails/workspace_new_invite_enterprise.hbs b/packages/server/src/enterprise/emails/workspace_new_invite_enterprise.hbs new file mode 100644 index 000000000..644da4709 --- /dev/null +++ b/packages/server/src/enterprise/emails/workspace_new_invite_enterprise.hbs @@ -0,0 +1,875 @@ + + + FlowiseAI + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + +
+ + + + + + +
+ + +
+ + + + + + +
+ + + + + + + + + + +
+ + + + + + +
+ +
+
+
+
+ + +
+
+ + + + + + + + +
+ + + + + + +
+ + +
+ + + + + + +
+ + + + + + + + + + + + + + + + + +
+
You've been invited +
to the + {{workspaceName}} + workspace in your organization. +
+
+
+ Hi there! ๐Ÿ‘‹, +

+ An administrator invited you to join the + {{workspaceName}} + workspace.

To get started: +
    +
  1. Click the button below to visit the login page
  2. +
  3. Sign in with your organization's SSO account or use email + and password
  4. +
  5. You'll get immediate access to the workspace
  6. +
+
+
+
+
+ + +
+
+ + + + + + + + +
+ + + + + + +
+ + +
+ + + + + + +
+ + + + + + +
+ + + + + + +
+ Accept Invite +
+
+
+
+ + +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + +
+ + +
+ + + + + + +
+ + + + + + +
+
+

+ The FlowiseAI Team +
+
+
+
+ + +
+
+ + + + + + + + + + + + + + +
+ + + + + + +
+ + +
+ + +
+ + + + + + +
+ + + + + + +
+ + + + + + +
+ + + + + +
+
+
+
+ + +
+ + + + + + +
+ + + + + + +
+
ย  +
+
+
+
+ + +
+ + + + + + +
+ + + + + + +
+ + + + + + +
+ + + + + + +
+
+
+
+ + +
+ + +
+
+ + + + +
+ + \ No newline at end of file diff --git a/packages/server/src/enterprise/emails/workspace_new_invite_enterprise.html b/packages/server/src/enterprise/emails/workspace_new_invite_enterprise.html new file mode 100644 index 000000000..cae1b6cab --- /dev/null +++ b/packages/server/src/enterprise/emails/workspace_new_invite_enterprise.html @@ -0,0 +1,1282 @@ + + + + FlowiseAI + + + + + + + + + + + + + + + + + + + + + +
+ + + + + +
+ + + + + + +
+ +
+ + + + + + +
+ + + + + + + + + +
+ + + + + + +
+ +
+
+
+
+ + +
+
+ + + + + + + + +
+ + + + + + +
+ +
+ + + + + + +
+ + + + + + + + + + + + + + + + +
+
+ You've been invited
to the {{workspaceName}} workspace in your + organization. +
+
+
+ Hi there! ๐Ÿ‘‹,

+ An administrator invited you to join the {{workspaceName}} + workspace.

To get started: +
    +
  1. Click the button below to visit the login page
  2. +
  3. + Sign in with your organization's SSO account or use email + and password +
  4. +
  5. You'll get immediate access to the workspace
  6. +
+
+
+
+
+ + +
+
+ + + + + + + + +
+ + + + + + +
+ +
+ + + + + + +
+ + + + + + +
+ + + + + + +
+ + Accept Invite + +
+
+
+
+ + +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + +
+ +
+ + + + + + +
+ + + + + + +
+
+

+ The FlowiseAI Team +
+
+
+
+ + +
+
+ + + + + + + + + + + + + + +
+ + + + + + +
+ +
+ +
+ + + + + + +
+ + + + + + +
+ + + + + + +
+ + + + + +
+
+
+
+ + +
+ + + + + + +
+ + + + + + +
+
+ ย  +
+
+
+
+ + +
+ + + + + + +
+ + + + + + +
+ + + + + + +
+ + + + + + +
+
+
+
+ + +
+ + +
+
+ + + + +
+ + diff --git a/packages/server/src/enterprise/emails/workspace_update_invite_cloud.hbs b/packages/server/src/enterprise/emails/workspace_update_invite_cloud.hbs new file mode 100644 index 000000000..a1f3837b0 --- /dev/null +++ b/packages/server/src/enterprise/emails/workspace_update_invite_cloud.hbs @@ -0,0 +1,1162 @@ + + + FlowiseAI + + + + + + + + + + + + + + + + + + + + + +
+ + + + + +
+ + + + + + +
+ +
+ + + + + + +
+ + + + + + + + + +
+ + + + + + +
+ +
+
+
+
+ + +
+
+ + + + + + + + +
+ + + + + + +
+ +
+ + + + + + +
+ + + + + + + + + + + + + + + + +
+
+ You've been invited +
to the + {{workspaceName}} + workspace. +
+
+
+ Hi there! ๐Ÿ‘‹,

+ Your invitation has been updated.

+ The administrator has modified your invitation details. Your + previous invite link is no longer valid.

To get started: +
    +
  1. Click the button below to create an account
  2. +
  3. You'll get immediate access to the workspace
  4. +
+
+
+
+
+ + +
+
+ + + + + + + + +
+ + + + + + +
+ +
+ + + + + + +
+ + + + + + +
+ + + + + + +
+ + Sign up for free + +
+
+
+
+ + +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + +
+ +
+ + + + + + +
+ + + + + + +
+
+

+ The FlowiseAI Team +
+
+
+
+ + +
+
+ + + + + + + + + + + + + + +
+ + + + + + +
+ +
+ +
+ + + + + + +
+ + + + + + +
+ + + + + + +
+ + + + + +
+
+
+
+ + +
+ + + + + + +
+ + + + + + +
+
+ ย  +
+
+
+
+ + +
+ + + + + + +
+ + + + + + +
+ + + + + + +
+ + + + + + +
+
+
+
+ + +
+ + +
+
+ + + + +
+ + \ No newline at end of file diff --git a/packages/server/src/enterprise/emails/workspace_update_invite_cloud.html b/packages/server/src/enterprise/emails/workspace_update_invite_cloud.html new file mode 100644 index 000000000..552ac0419 --- /dev/null +++ b/packages/server/src/enterprise/emails/workspace_update_invite_cloud.html @@ -0,0 +1,1278 @@ + + + + FlowiseAI + + + + + + + + + + + + + + + + + + + + + +
+ + + + + +
+ + + + + + +
+ +
+ + + + + + +
+ + + + + + + + + +
+ + + + + + +
+ +
+
+
+
+ + +
+
+ + + + + + + + +
+ + + + + + +
+ +
+ + + + + + +
+ + + + + + + + + + + + + + + + +
+
+ You've been invited
to the {{workspaceName}} workspace. +
+
+
+ Hi there! ๐Ÿ‘‹,

+ Your invitation has been updated.

+ The administrator has modified your invitation details. Your + previous invite link is no longer valid.

To get started: +
    +
  1. Click the button below to create an account
  2. +
  3. You'll get immediate access to the workspace
  4. +
+
+
+
+
+ + +
+
+ + + + + + + + +
+ + + + + + +
+ +
+ + + + + + +
+ + + + + + +
+ + + + + + +
+ + Sign up for free + +
+
+
+
+ + +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + +
+ +
+ + + + + + +
+ + + + + + +
+
+

+ The FlowiseAI Team +
+
+
+
+ + +
+
+ + + + + + + + + + + + + + +
+ + + + + + +
+ +
+ +
+ + + + + + +
+ + + + + + +
+ + + + + + +
+ + + + + +
+
+
+
+ + +
+ + + + + + +
+ + + + + + +
+
+ ย  +
+
+
+
+ + +
+ + + + + + +
+ + + + + + +
+ + + + + + +
+ + + + + + +
+
+
+
+ + +
+ + +
+
+ + + + +
+ + diff --git a/packages/server/src/enterprise/emails/workspace_update_invite_enterprise.hbs b/packages/server/src/enterprise/emails/workspace_update_invite_enterprise.hbs new file mode 100644 index 000000000..99fefddf6 --- /dev/null +++ b/packages/server/src/enterprise/emails/workspace_update_invite_enterprise.hbs @@ -0,0 +1,1166 @@ + + + FlowiseAI + + + + + + + + + + + + + + + + + + + + + +
+ + + + + +
+ + + + + + +
+ +
+ + + + + + +
+ + + + + + + + + +
+ + + + + + +
+ +
+
+
+
+ + +
+
+ + + + + + + + +
+ + + + + + +
+ +
+ + + + + + +
+ + + + + + + + + + + + + + + + +
+
+ You've been invited +
to the + {{workspaceName}} + workspace in your organization. +
+
+
+ Hi there! ๐Ÿ‘‹, +

+ The administrator has modified your invitation details. Your + previous invite link is no longer valid.

To get started: +
    +
  1. Click the button below to visit the login page
  2. +
  3. + Sign in with your organization's SSO account or use email + and password +
  4. +
  5. You'll get immediate access to the workspace
  6. +
+
+
+
+
+ + +
+
+ + + + + + + + +
+ + + + + + +
+ +
+ + + + + + +
+ + + + + + +
+ + + + + + +
+ + Accept Invite + +
+
+
+
+ + +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + +
+ +
+ + + + + + +
+ + + + + + +
+
+

+ The FlowiseAI Team +
+
+
+
+ + +
+
+ + + + + + + + + + + + + + +
+ + + + + + +
+ +
+ +
+ + + + + + +
+ + + + + + +
+ + + + + + +
+ + + + + +
+
+
+
+ + +
+ + + + + + +
+ + + + + + +
+
+ ย  +
+
+
+
+ + +
+ + + + + + +
+ + + + + + +
+ + + + + + +
+ + + + + + +
+
+
+
+ + +
+ + +
+
+ + + + +
+ + \ No newline at end of file diff --git a/packages/server/src/enterprise/emails/workspace_update_invite_enterprise.html b/packages/server/src/enterprise/emails/workspace_update_invite_enterprise.html new file mode 100644 index 000000000..8e91c8cc8 --- /dev/null +++ b/packages/server/src/enterprise/emails/workspace_update_invite_enterprise.html @@ -0,0 +1,1282 @@ + + + + FlowiseAI + + + + + + + + + + + + + + + + + + + + + +
+ + + + + +
+ + + + + + +
+ +
+ + + + + + +
+ + + + + + + + + +
+ + + + + + +
+ +
+
+
+
+ + +
+
+ + + + + + + + +
+ + + + + + +
+ +
+ + + + + + +
+ + + + + + + + + + + + + + + + +
+
+ You've been invited
to the {{workspaceName}} workspace in your + organization. +
+
+
+ Hi there! ๐Ÿ‘‹,

+ The administrator has modified your invitation details. Your + previous invite link is no longer valid.

To get started: +
    +
  1. Click the button below to visit the login page
  2. +
  3. + Sign in with your organization's SSO account or use email + and password +
  4. +
  5. You'll get immediate access to the workspace
  6. +
+
+
+
+
+ + +
+
+ + + + + + + + +
+ + + + + + +
+ +
+ + + + + + +
+ + + + + + +
+ + + + + + +
+ + Accept Invite + +
+
+
+
+ + +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + +
+ +
+ + + + + + +
+ + + + + + +
+
+

+ The FlowiseAI Team +
+
+
+
+ + +
+
+ + + + + + + + + + + + + + +
+ + + + + + +
+ +
+ +
+ + + + + + +
+ + + + + + +
+ + + + + + +
+ + + + + +
+
+
+
+ + +
+ + + + + + +
+ + + + + + +
+
+ ย  +
+
+
+
+ + +
+ + + + + + +
+ + + + + + +
+ + + + + + +
+ + + + + + +
+
+
+
+ + +
+ + +
+
+ + + + +
+ + diff --git a/packages/server/src/enterprise/emails/workspace_user_reset_password.hbs b/packages/server/src/enterprise/emails/workspace_user_reset_password.hbs new file mode 100644 index 000000000..c3c842ebf --- /dev/null +++ b/packages/server/src/enterprise/emails/workspace_user_reset_password.hbs @@ -0,0 +1,877 @@ + + + FlowiseAI + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + +
+ + + + + + +
+ + +
+ + + + + + +
+ + + + + + + + + + +
+ + + + + + +
+ +
+
+
+
+ + +
+
+ + + + + + + + +
+ + + + + + +
+ + +
+ + + + + + +
+ + + + + + + + + + + + + + + + + +
+
Reset your FlowiseAI password +
+
+
+ Hi there! ๐Ÿ‘‹, +

+ We received a request to reset the password for your FlowiseAI + account. If you didn't make the request, you can safely ignore this + email. +

+ To reset your password, follow the instructions below: +
    +
  1. Visit the following link (or click the button below):
  2. + {{resetLink}} +
  3. Choose a new password
  4. +
+
+
+
+
+ + +
+
+ + + + + + + + +
+ + + + + + +
+ + +
+ + + + + + +
+ + + + + + +
+ + + + + + +
+ Reset Password +
+
+
+
+ + +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + +
+ + +
+ + + + + + +
+ + + + + + +
+
+

+ The FlowiseAI Team +
+
+
+
+ + +
+
+ + + + + + + + + + + + + + +
+ + + + + + +
+ + +
+ + +
+ + + + + + +
+ + + + + + +
+ + + + + + +
+ + + + + +
+
+
+
+ + +
+ + + + + + +
+ + + + + + +
+
ย  +
+
+
+
+ + +
+ + + + + + +
+ + + + + + +
+ + + + + + +
+ + + + + + +
+
+
+
+ + +
+ + +
+
+ + + + +
+ + \ No newline at end of file diff --git a/packages/server/src/enterprise/emails/workspace_user_reset_password.html b/packages/server/src/enterprise/emails/workspace_user_reset_password.html new file mode 100644 index 000000000..912a64837 --- /dev/null +++ b/packages/server/src/enterprise/emails/workspace_user_reset_password.html @@ -0,0 +1,1282 @@ + + + + FlowiseAI + + + + + + + + + + + + + + + + + + + + + +
+ + + + + +
+ + + + + + +
+ +
+ + + + + + +
+ + + + + + + + + +
+ + + + + + +
+ +
+
+
+
+ + +
+
+ + + + + + + + +
+ + + + + + +
+ +
+ + + + + + +
+ + + + + + + + + + + + + + + + +
+
+ Reset your FlowiseAI password +
+
+
+ Hi there! ๐Ÿ‘‹,

+ We received a request to reset the password for your FlowiseAI + account. If you didn't make the request, you can safely ignore this + email.

+ To reset your password, follow the instructions below: +
    +
  1. Visit the following link (or click the button below):
  2. + {{resetLink}} +
  3. Choose a new password
  4. +
+
+
+
+
+ + +
+
+ + + + + + + + +
+ + + + + + +
+ +
+ + + + + + +
+ + + + + + +
+ + + + + + +
+ + Reset Password + +
+
+
+
+ + +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + +
+ +
+ + + + + + +
+ + + + + + +
+
+

+ The FlowiseAI Team +
+
+
+
+ + +
+
+ + + + + + + + + + + + + + +
+ + + + + + +
+ +
+ +
+ + + + + + +
+ + + + + + +
+ + + + + + +
+ + + + + +
+
+
+
+ + +
+ + + + + + +
+ + + + + + +
+
+ ย  +
+
+
+
+ + +
+ + + + + + +
+ + + + + + +
+ + + + + + +
+ + + + + + +
+
+
+
+ + +
+ + +
+
+ + + + +
+ + diff --git a/packages/server/src/enterprise/middleware/passport/AuthStrategy.ts b/packages/server/src/enterprise/middleware/passport/AuthStrategy.ts new file mode 100644 index 000000000..eaadbfa5f --- /dev/null +++ b/packages/server/src/enterprise/middleware/passport/AuthStrategy.ts @@ -0,0 +1,44 @@ +import { JwtFromRequestFunction, Strategy as JwtStrategy, VerifiedCallback } from 'passport-jwt' +import { decryptToken } from '../../utils/tempTokenUtils' +import { Strategy } from 'passport' +import { Request } from 'express' +import { ICommonObject } from 'flowise-components' + +const _cookieExtractor = (req: any) => { + let jwt = null + + if (req && req.cookies) { + jwt = req.cookies['token'] + } + + return jwt +} + +export const getAuthStrategy = (options: any): Strategy => { + let jwtFromRequest: JwtFromRequestFunction + jwtFromRequest = _cookieExtractor + const jwtOptions = { + jwtFromRequest: jwtFromRequest, + passReqToCallback: true, + ...options + } + const jwtVerify = async (req: Request, payload: ICommonObject, done: VerifiedCallback) => { + try { + if (!req.user) { + return done(null, false, 'Unauthorized.') + } + const meta = decryptToken(payload.meta) + if (!meta) { + return done(null, false, 'Unauthorized.') + } + const ids = meta.split(':') + if (ids.length !== 2 || req.user.id !== ids[0]) { + return done(null, false, 'Unauthorized.') + } + done(null, req.user) + } catch (error) { + done(error, false) + } + } + return new JwtStrategy(jwtOptions, jwtVerify) +} diff --git a/packages/server/src/enterprise/middleware/passport/SessionPersistance.ts b/packages/server/src/enterprise/middleware/passport/SessionPersistance.ts new file mode 100644 index 000000000..afaf3c2f0 --- /dev/null +++ b/packages/server/src/enterprise/middleware/passport/SessionPersistance.ts @@ -0,0 +1,183 @@ +import Redis from 'ioredis' +import { RedisStore } from 'connect-redis' +import { getDatabaseSSLFromEnv } from '../../../DataSource' +import path from 'path' +import { 
getUserHome } from '../../../utils' +import type { Store } from 'express-session' +import { LoginSession } from '../../database/entities/login-session.entity' +import { getRunningExpressApp } from '../../../utils/getRunningExpressApp' + +let redisClient: Redis | null = null +let redisStore: RedisStore | null = null +let dbStore: Store | null = null + +export const initializeRedisClientAndStore = (): RedisStore => { + if (!redisClient) { + if (process.env.REDIS_URL) { + redisClient = new Redis(process.env.REDIS_URL) + } else { + redisClient = new Redis({ + host: process.env.REDIS_HOST || 'localhost', + port: parseInt(process.env.REDIS_PORT || '6379'), + username: process.env.REDIS_USERNAME || undefined, + password: process.env.REDIS_PASSWORD || undefined, + tls: + process.env.REDIS_TLS === 'true' + ? { + cert: process.env.REDIS_CERT ? Buffer.from(process.env.REDIS_CERT, 'base64') : undefined, + key: process.env.REDIS_KEY ? Buffer.from(process.env.REDIS_KEY, 'base64') : undefined, + ca: process.env.REDIS_CA ? 
Buffer.from(process.env.REDIS_CA, 'base64') : undefined + } + : undefined + }) + } + } + if (!redisStore) { + redisStore = new RedisStore({ client: redisClient }) + } + return redisStore +} + +export const initializeDBClientAndStore: any = () => { + if (dbStore) return dbStore + + const databaseType = process.env.DATABASE_TYPE || 'sqlite' + switch (databaseType) { + case 'mysql': { + const expressSession = require('express-session') + const MySQLStore = require('express-mysql-session')(expressSession) + const options = { + host: process.env.DATABASE_HOST, + port: parseInt(process.env.DATABASE_PORT || '3306'), + user: process.env.DATABASE_USER, + password: process.env.DATABASE_PASSWORD, + database: process.env.DATABASE_NAME, + createDatabaseTable: true, + schema: { + tableName: 'login_sessions' + } + } + dbStore = new MySQLStore(options) + return dbStore + } + case 'mariadb': + /* TODO: Implement MariaDB session store */ + break + case 'postgres': { + // default is postgres + const pg = require('pg') + const expressSession = require('express-session') + const pgSession = require('connect-pg-simple')(expressSession) + + const pgPool = new pg.Pool({ + host: process.env.DATABASE_HOST, + port: parseInt(process.env.DATABASE_PORT || '5432'), + user: process.env.DATABASE_USER, + password: process.env.DATABASE_PASSWORD, + database: process.env.DATABASE_NAME, + ssl: getDatabaseSSLFromEnv() + }) + dbStore = new pgSession({ + pool: pgPool, // Connection pool + tableName: 'login_sessions', + schemaName: 'public', + createTableIfMissing: true + }) + return dbStore + } + case 'default': + case 'sqlite': { + const expressSession = require('express-session') + const sqlSession = require('connect-sqlite3')(expressSession) + let flowisePath = path.join(getUserHome(), '.flowise') + const homePath = process.env.DATABASE_PATH ?? 
flowisePath + dbStore = new sqlSession({ + db: 'database.sqlite', + table: 'login_sessions', + dir: homePath + }) + return dbStore + } + } +} + +const getUserIdFromSession = (session: any): string | undefined => { + try { + const data = typeof session === 'string' ? JSON.parse(session) : session + return data?.passport?.user?.id + } catch { + return undefined + } +} + +export const destroyAllSessionsForUser = async (userId: string): Promise => { + try { + if (redisStore && redisClient) { + const prefix = (redisStore as any)?.prefix ?? 'sess:' + const pattern = `${prefix}*` + const keysToDelete: string[] = [] + const batchSize = 1000 + + const stream = redisClient.scanStream({ + match: pattern, + count: batchSize + }) + + for await (const keysBatch of stream) { + if (keysBatch.length === 0) continue + + const sessions = await redisClient.mget(...keysBatch) + for (let i = 0; i < sessions.length; i++) { + if (getUserIdFromSession(sessions[i]) === userId) { + keysToDelete.push(keysBatch[i]) + } + } + + if (keysToDelete.length >= batchSize) { + const pipeline = redisClient.pipeline() + keysToDelete.splice(0, batchSize).forEach((key) => pipeline.del(key)) + await pipeline.exec() + } + } + + if (keysToDelete.length > 0) { + const pipeline = redisClient.pipeline() + keysToDelete.forEach((key) => pipeline.del(key)) + await pipeline.exec() + } + } else if (dbStore) { + const appServer = getRunningExpressApp() + const dataSource = appServer.AppDataSource + const repository = dataSource.getRepository(LoginSession) + + const databaseType = process.env.DATABASE_TYPE || 'sqlite' + switch (databaseType) { + case 'sqlite': + await repository + .createQueryBuilder() + .delete() + .where(`json_extract(sess, '$.passport.user.id') = :userId`, { userId }) + .execute() + break + case 'mysql': + await repository + .createQueryBuilder() + .delete() + .where(`JSON_EXTRACT(sess, '$.passport.user.id') = :userId`, { userId }) + .execute() + break + case 'postgres': + await 
repository.createQueryBuilder().delete().where(`sess->'passport'->'user'->>'id' = :userId`, { userId }).execute() + break + default: + console.warn('Unsupported database type:', databaseType) + break + } + } else { + console.warn('Session store not available, skipping session invalidation') + } + } catch (error) { + console.error('Error destroying sessions for user:', error) + throw error + } +} diff --git a/packages/server/src/enterprise/middleware/passport/index.ts b/packages/server/src/enterprise/middleware/passport/index.ts new file mode 100644 index 000000000..dc7658030 --- /dev/null +++ b/packages/server/src/enterprise/middleware/passport/index.ts @@ -0,0 +1,435 @@ +import { HttpStatusCode } from 'axios' +import { RedisStore } from 'connect-redis' +import express, { NextFunction, Request, Response } from 'express' +import session from 'express-session' +import { StatusCodes } from 'http-status-codes' +import jwt, { JwtPayload, sign } from 'jsonwebtoken' +import passport from 'passport' +import { VerifiedCallback } from 'passport-jwt' +import { InternalFlowiseError } from '../../../errors/internalFlowiseError' +import { IdentityManager } from '../../../IdentityManager' +import { Platform } from '../../../Interface' +import { getRunningExpressApp } from '../../../utils/getRunningExpressApp' +import { OrganizationUserStatus } from '../../database/entities/organization-user.entity' +import { GeneralRole } from '../../database/entities/role.entity' +import { WorkspaceUser, WorkspaceUserStatus } from '../../database/entities/workspace-user.entity' +import { ErrorMessage, IAssignedWorkspace, LoggedInUser } from '../../Interface.Enterprise' +import { AccountService } from '../../services/account.service' +import { OrganizationUserErrorMessage, OrganizationUserService } from '../../services/organization-user.service' +import { OrganizationService } from '../../services/organization.service' +import { RoleErrorMessage, RoleService } from '../../services/role.service' 
+import { WorkspaceUserService } from '../../services/workspace-user.service' +import { decryptToken, encryptToken, generateSafeCopy } from '../../utils/tempTokenUtils' +import { getAuthStrategy } from './AuthStrategy' +import { initializeDBClientAndStore, initializeRedisClientAndStore } from './SessionPersistance' +import { v4 as uuidv4 } from 'uuid' + +const localStrategy = require('passport-local').Strategy + +const jwtAudience = process.env.JWT_AUDIENCE || 'AUDIENCE' +const jwtIssuer = process.env.JWT_ISSUER || 'ISSUER' + +const expireAuthTokensOnRestart = process.env.EXPIRE_AUTH_TOKENS_ON_RESTART === 'true' +const jwtAuthTokenSecret = process.env.JWT_AUTH_TOKEN_SECRET || 'auth_token' +const jwtRefreshSecret = process.env.JWT_REFRESH_TOKEN_SECRET || process.env.JWT_AUTH_TOKEN_SECRET || 'refresh_token' + +// Allow explicit override of cookie security settings +// This is useful when running behind a reverse proxy/load balancer that terminates SSL +const secureCookie = + process.env.SECURE_COOKIES === 'false' + ? false + : process.env.SECURE_COOKIES === 'true' + ? true + : process.env.APP_URL?.startsWith('https') + ? 
true + : false +const jwtOptions = { + secretOrKey: jwtAuthTokenSecret, + audience: jwtAudience, + issuer: jwtIssuer +} + +const _initializePassportMiddleware = async (app: express.Application) => { + // Configure session middleware + let options: any = { + secret: process.env.EXPRESS_SESSION_SECRET || 'flowise', + resave: false, + saveUninitialized: false, + cookie: { + secure: secureCookie, + httpOnly: true, + sameSite: 'lax' // Add sameSite attribute + } + } + + // if the auth tokens are not to be expired on restart, then configure the session store + if (!expireAuthTokensOnRestart) { + // configure session store based on the mode + if (process.env.MODE === 'queue') { + const redisStore = initializeRedisClientAndStore() + options.store = redisStore as RedisStore + } else { + // for the database store, choose store basis the DB configuration from .env + const dbSessionStore = initializeDBClientAndStore() + if (dbSessionStore) { + options.store = dbSessionStore + } + } + } + + app.use(session(options)) + app.use(passport.initialize()) + app.use(passport.session()) + + if (options.store) { + const appServer = getRunningExpressApp() + appServer.sessionStore = options.store + } + + passport.serializeUser((user: any, done) => { + done(null, user) + }) + + passport.deserializeUser((user: any, done) => { + done(null, user) + }) +} + +export const initializeJwtCookieMiddleware = async (app: express.Application, identityManager: IdentityManager) => { + await _initializePassportMiddleware(app) + + const strategy = getAuthStrategy(jwtOptions) + passport.use(strategy) + passport.use( + 'login', + new localStrategy( + { + usernameField: 'email', + passwordField: 'password', + session: true + }, + async (email: string, password: string, done: VerifiedCallback) => { + let queryRunner + try { + queryRunner = getRunningExpressApp().AppDataSource.createQueryRunner() + await queryRunner.connect() + const accountService = new AccountService() + const body: any = { + user: { + email: 
email, + credential: password + } + } + const response = await accountService.login(body) + const workspaceUser: WorkspaceUser = + Array.isArray(response.workspaceDetails) && response.workspaceDetails.length > 0 + ? response.workspaceDetails[0] + : (response.workspaceDetails as WorkspaceUser) + const workspaceUserService = new WorkspaceUserService() + workspaceUser.status = WorkspaceUserStatus.ACTIVE + workspaceUser.lastLogin = new Date().toISOString() + workspaceUser.updatedBy = workspaceUser.userId + const organizationUserService = new OrganizationUserService() + const { organizationUser } = await organizationUserService.readOrganizationUserByWorkspaceIdUserId( + workspaceUser.workspaceId, + workspaceUser.userId, + queryRunner + ) + if (!organizationUser) + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, OrganizationUserErrorMessage.ORGANIZATION_USER_NOT_FOUND) + organizationUser.status = OrganizationUserStatus.ACTIVE + await workspaceUserService.updateWorkspaceUser(workspaceUser, queryRunner) + await organizationUserService.updateOrganizationUser(organizationUser) + + const workspaceUsers = await workspaceUserService.readWorkspaceUserByUserId(organizationUser.userId, queryRunner) + const assignedWorkspaces: IAssignedWorkspace[] = workspaceUsers.map((workspaceUser) => { + return { + id: workspaceUser.workspace.id, + name: workspaceUser.workspace.name, + role: workspaceUser.role?.name, + organizationId: workspaceUser.workspace.organizationId + } as IAssignedWorkspace + }) + + let roleService = new RoleService() + const ownerRole = await roleService.readGeneralRoleByName(GeneralRole.OWNER, queryRunner) + const role = await roleService.readRoleById(workspaceUser.roleId, queryRunner) + if (!role) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, RoleErrorMessage.ROLE_NOT_FOUND) + + const orgService = new OrganizationService() + const organization = await orgService.readOrganizationById(organizationUser.organizationId, queryRunner) + if (!organization) { + 
return done('Organization not found') + } + const subscriptionId = organization.subscriptionId as string + const customerId = organization.customerId as string + const features = await identityManager.getFeaturesByPlan(subscriptionId) + const productId = await identityManager.getProductIdFromSubscription(subscriptionId) + + const loggedInUser: LoggedInUser = { + id: workspaceUser.userId, + email: response.user.email, + name: response.user?.name, + roleId: workspaceUser.roleId, + activeOrganizationId: organization.id, + activeOrganizationSubscriptionId: subscriptionId, + activeOrganizationCustomerId: customerId, + activeOrganizationProductId: productId, + isOrganizationAdmin: workspaceUser.roleId === ownerRole.id, + activeWorkspaceId: workspaceUser.workspaceId, + activeWorkspace: workspaceUser.workspace.name, + assignedWorkspaces, + permissions: [...JSON.parse(role.permissions)], + features + } + return done(null, loggedInUser, { message: 'Logged in Successfully' }) + } catch (error) { + return done(error) + } finally { + if (queryRunner) await queryRunner.release() + } + } + ) + ) + + app.post('/api/v1/auth/resolve', async (req, res) => { + // check for the organization, if empty redirect to the organization setup page for OpenSource and Enterprise Versions + // for Cloud (Horizontal) version, redirect to the signin page + const expressApp = getRunningExpressApp() + const platform = expressApp.identityManager.getPlatformType() + if (platform === Platform.CLOUD) { + return res.status(HttpStatusCode.Ok).json({ redirectUrl: '/signin' }) + } + const orgService = new OrganizationService() + const queryRunner = expressApp.AppDataSource.createQueryRunner() + await queryRunner.connect() + const registeredOrganizationCount = await orgService.countOrganizations(queryRunner) + await queryRunner.release() + if (registeredOrganizationCount === 0) { + switch (platform) { + case Platform.ENTERPRISE: + if (!identityManager.isLicenseValid()) { + return 
res.status(HttpStatusCode.Ok).json({ redirectUrl: '/license-expired' }) + } + return res.status(HttpStatusCode.Ok).json({ redirectUrl: '/organization-setup' }) + default: + return res.status(HttpStatusCode.Ok).json({ redirectUrl: '/organization-setup' }) + } + } + switch (platform) { + case Platform.ENTERPRISE: + if (!identityManager.isLicenseValid()) { + return res.status(HttpStatusCode.Ok).json({ redirectUrl: '/license-expired' }) + } + return res.status(HttpStatusCode.Ok).json({ redirectUrl: '/signin' }) + default: + return res.status(HttpStatusCode.Ok).json({ redirectUrl: '/signin' }) + } + }) + + app.post('/api/v1/auth/refreshToken', async (req, res) => { + const refreshToken = req.cookies.refreshToken + if (!refreshToken) return res.sendStatus(401) + + jwt.verify(refreshToken, jwtRefreshSecret, async (err: any, payload: any) => { + if (err || !payload) return res.status(401).json({ message: ErrorMessage.REFRESH_TOKEN_EXPIRED }) + // @ts-ignore + const loggedInUser = req.user as LoggedInUser + let isSSO = false + let newTokenResponse: any = {} + if (loggedInUser && loggedInUser.ssoRefreshToken) { + try { + newTokenResponse = await identityManager.getRefreshToken(loggedInUser.ssoProvider, loggedInUser.ssoRefreshToken) + if (newTokenResponse.error) { + return res.status(401).json({ message: ErrorMessage.REFRESH_TOKEN_EXPIRED }) + } + isSSO = true + } catch (error) { + return res.status(401).json({ message: ErrorMessage.REFRESH_TOKEN_EXPIRED }) + } + } + const meta = decryptToken(payload.meta) + if (!meta) { + return res.status(401).json({ message: ErrorMessage.REFRESH_TOKEN_EXPIRED }) + } + if (isSSO) { + loggedInUser.ssoToken = newTokenResponse.access_token + if (newTokenResponse.refresh_token) { + loggedInUser.ssoRefreshToken = newTokenResponse.refresh_token + } + return setTokenOrCookies(res, loggedInUser, false, req, false, true) + } else { + return setTokenOrCookies(res, loggedInUser, false, req) + } + }) + }) + + app.post('/api/v1/auth/login', (req, res, 
next?) => { + passport.authenticate('login', async (err: any, user: LoggedInUser) => { + try { + if (err || !user) { + return next ? next(err) : res.status(401).json(err) + } + if (identityManager.isEnterprise() && !identityManager.isLicenseValid()) { + return res.status(401).json({ redirectUrl: '/license-expired' }) + } + + req.session.regenerate((regenerateErr) => { + if (regenerateErr) { + return next ? next(regenerateErr) : res.status(500).json({ message: 'Session regeneration failed' }) + } + + req.login(user, { session: true }, async (error) => { + if (error) { + return next ? next(error) : res.status(401).json(error) + } + return setTokenOrCookies(res, user, true, req) + }) + }) + } catch (error: any) { + return next ? next(error) : res.status(401).json(error) + } + })(req, res, next) + }) +} + +export const setTokenOrCookies = ( + res: Response, + user: any, + regenerateRefreshToken: boolean, + req?: Request, + redirect?: boolean, + isSSO?: boolean +) => { + const token = generateJwtAuthToken(user) + let refreshToken: string = '' + if (regenerateRefreshToken) { + refreshToken = generateJwtRefreshToken(user) + } else { + refreshToken = req?.cookies?.refreshToken + } + const returnUser = generateSafeCopy(user) + returnUser.isSSO = !isSSO ? false : isSSO + + if (redirect) { + // 1. Generate a random token + const ssoToken = uuidv4() + + // 2. Store returnUser in your session store, keyed by ssoToken, with a short expiry + storeSSOUserPayload(ssoToken, returnUser) + // 3. Redirect with token only + const dashboardUrl = `/sso-success?token=${ssoToken}` + + // Return the token as a cookie in our response. + let resWithCookies = res + .cookie('token', token, { + httpOnly: true, + secure: secureCookie, + sameSite: 'lax' + }) + .cookie('refreshToken', refreshToken, { + httpOnly: true, + secure: secureCookie, + sameSite: 'lax' + }) + resWithCookies.redirect(dashboardUrl) + } else { + // Return the token as a cookie in our response. 
+ res.cookie('token', token, { + httpOnly: true, + secure: secureCookie, + sameSite: 'lax' + }) + .cookie('refreshToken', refreshToken, { + httpOnly: true, + secure: secureCookie, + sameSite: 'lax' + }) + .type('json') + .send({ ...returnUser }) + } +} + +export const generateJwtAuthToken = (user: any) => { + let expiryInMinutes = -1 + if (user?.ssoToken) { + const jwtHeader = jwt.decode(user.ssoToken, { complete: true }) + if (jwtHeader) { + const utcSeconds = (jwtHeader.payload as any).exp + let d = new Date(0) // The 0 there is the key, which sets the date to the epoch + d.setUTCSeconds(utcSeconds) + // get the minutes difference from current time + expiryInMinutes = Math.abs(d.getTime() - new Date().getTime()) / 60000 + } + } + if (expiryInMinutes === -1) { + expiryInMinutes = process.env.JWT_TOKEN_EXPIRY_IN_MINUTES ? parseInt(process.env.JWT_TOKEN_EXPIRY_IN_MINUTES) : 60 + } + return _generateJwtToken(user, expiryInMinutes, jwtAuthTokenSecret) +} + +export const generateJwtRefreshToken = (user: any) => { + let expiryInMinutes = -1 + if (user.ssoRefreshToken) { + const jwtHeader = jwt.decode(user.ssoRefreshToken, { complete: false }) + if (jwtHeader && typeof jwtHeader !== 'string') { + const utcSeconds = (jwtHeader as JwtPayload).exp + if (utcSeconds) { + let d = new Date(0) // The 0 there is the key, which sets the date to the epoch + d.setUTCSeconds(utcSeconds) + // get the minutes difference from current time + expiryInMinutes = Math.abs(d.getTime() - new Date().getTime()) / 60000 + } + } + } + if (expiryInMinutes === -1) { + expiryInMinutes = process.env.JWT_REFRESH_TOKEN_EXPIRY_IN_MINUTES + ? 
parseInt(process.env.JWT_REFRESH_TOKEN_EXPIRY_IN_MINUTES) + : 129600 // 90 days + } + return _generateJwtToken(user, expiryInMinutes, jwtRefreshSecret) +} + +const _generateJwtToken = (user: Partial, expiryInMinutes: number, secret: string) => { + const encryptedUserInfo = encryptToken(user?.id + ':' + user?.activeWorkspaceId) + return sign({ id: user?.id, username: user?.name, meta: encryptedUserInfo }, secret!, { + expiresIn: expiryInMinutes + 'm', // Expiry in minutes + notBefore: '0', // Cannot use before now, can be configured to be deferred. + algorithm: 'HS256', // HMAC using SHA-256 hash algorithm + audience: jwtAudience, // The audience of the token + issuer: jwtIssuer // The issuer of the token + }) +} + +export const verifyToken = (req: Request, res: Response, next: NextFunction) => { + passport.authenticate('jwt', { session: true }, (err: any, user: LoggedInUser, info: object) => { + if (err) { + return next(err) + } + + // @ts-ignore + if (info && info.name === 'TokenExpiredError') { + if (req.cookies && req.cookies.refreshToken) { + return res.status(401).json({ message: ErrorMessage.TOKEN_EXPIRED, retry: true }) + } + return res.status(401).json({ message: ErrorMessage.INVALID_MISSING_TOKEN }) + } + + if (!user) { + return res.status(401).json({ message: ErrorMessage.INVALID_MISSING_TOKEN }) + } + + const identityManager = getRunningExpressApp().identityManager + if (identityManager.isEnterprise() && !identityManager.isLicenseValid()) { + return res.status(401).json({ redirectUrl: '/license-expired' }) + } + + req.user = user + next() + })(req, res, next) +} + +const storeSSOUserPayload = (ssoToken: string, returnUser: any) => { + const app = getRunningExpressApp() + app.cachePool.addSSOTokenCache(ssoToken, returnUser) +} diff --git a/packages/server/src/enterprise/middleware/prometheus/index.ts b/packages/server/src/enterprise/middleware/prometheus/index.ts new file mode 100644 index 000000000..d0334825a --- /dev/null +++ 
b/packages/server/src/enterprise/middleware/prometheus/index.ts @@ -0,0 +1,65 @@ +import express from 'express' +import promClient, { Counter } from 'prom-client' + +export const initializePrometheus = (app: express.Application) => { + const register = new promClient.Registry() + register.setDefaultLabels({ + app: 'FlowiseAI' + }) + + const predictionsTotal = new promClient.Counter({ + name: 'predictions_total', + help: 'Total number of predictions', + labelNames: ['status'] + }) + + const requestCounter = new Counter({ + name: 'http_requests_total', + help: 'Total number of HTTP requests', + labelNames: ['method', 'path', 'status'] + }) + + app.use('/api/v1/prediction', async (req, res, next) => { + res.on('finish', async () => { + requestCounter.labels(req?.method, req?.path, res.statusCode.toString()).inc() + predictionsTotal.labels('success').inc() + }) + next() }) + + // enable default metrics like CPU usage, memory usage, etc. + promClient.collectDefaultMetrics({ register }) + // Add our custom metric to the registry + register.registerMetric(requestCounter) + register.registerMetric(predictionsTotal) + + // Add Prometheus middleware to the app + app.use('/api/v1/metrics', async (req, res) => { + res.set('Content-Type', register.contentType) + const currentMetrics = await register.metrics() + res.send(currentMetrics) + }) + + const httpRequestDurationMicroseconds = new promClient.Histogram({ + name: 'http_request_duration_ms', + help: 'Duration of HTTP requests in ms', + labelNames: ['method', 'route', 'code'], + buckets: [1, 5, 15, 50, 100, 200, 300, 400, 500] // buckets for response time from 0.1ms to 500ms + }) + register.registerMetric(httpRequestDurationMicroseconds) + + // Runs before each requests + app.use((req, res, next) => { + res.locals.startEpoch = Date.now() + next() + }) + + // Runs after each requests + app.use((req, res, next) => { + res.on('finish', async () => { + requestCounter.inc() + const responseTimeInMs = Date.now() - res.locals.startEpoch + 
httpRequestDurationMicroseconds.labels(req.method, req?.route?.path, res.statusCode.toString()).observe(responseTimeInMs) + }) + next() + }) +} diff --git a/packages/server/src/enterprise/rbac/PermissionCheck.ts b/packages/server/src/enterprise/rbac/PermissionCheck.ts new file mode 100644 index 000000000..d0856c4bc --- /dev/null +++ b/packages/server/src/enterprise/rbac/PermissionCheck.ts @@ -0,0 +1,46 @@ +import { NextFunction, Request, Response } from 'express' +import { ErrorMessage } from '../Interface.Enterprise' + +// Check if the user has the required permission for a route +export const checkPermission = (permission: string) => { + return (req: Request, res: Response, next: NextFunction) => { + const user = req.user + // if the user is not logged in, return forbidden + if (user) { + if (user.isOrganizationAdmin) { + return next() + } + const permissions = user.permissions + if (permissions && permissions.includes(permission)) { + return next() + } + } + // else throw 403 forbidden error + return res.status(403).json({ message: ErrorMessage.FORBIDDEN }) + } +} + +// checks for any permission, input is the permissions separated by comma +export const checkAnyPermission = (permissionsString: string) => { + return (req: Request, res: Response, next: NextFunction) => { + const user = req.user + // if the user is not logged in, return forbidden + if (user) { + if (user.isOrganizationAdmin) { + return next() + } + const permissions = user.permissions + const permissionIds = permissionsString.split(',') + if (permissions && permissions.length) { + // split permissions and check if any of the permissions are present in the user's permissions + for (let i = 0; i < permissionIds.length; i++) { + if (permissions.includes(permissionIds[i])) { + return next() + } + } + } + } + // else throw 403 forbidden error + return res.status(403).json({ message: ErrorMessage.FORBIDDEN }) + } +} diff --git a/packages/server/src/enterprise/rbac/Permissions.ts 
b/packages/server/src/enterprise/rbac/Permissions.ts new file mode 100644 index 000000000..e44f541a8 --- /dev/null +++ b/packages/server/src/enterprise/rbac/Permissions.ts @@ -0,0 +1,179 @@ +export class Permissions { + private categories: PermissionCategory[] = [] + constructor() { + // const auditCategory = new PermissionCategory('audit') + // auditCategory.addPermission(new Permission('auditLogs:view', 'View Audit Logs')) + // this.categories.push(auditCategory) + + const chatflowsCategory = new PermissionCategory('chatflows') + chatflowsCategory.addPermission(new Permission('chatflows:view', 'View')) + chatflowsCategory.addPermission(new Permission('chatflows:create', 'Create')) + chatflowsCategory.addPermission(new Permission('chatflows:update', 'Update')) + chatflowsCategory.addPermission(new Permission('chatflows:duplicate', 'Duplicate')) + chatflowsCategory.addPermission(new Permission('chatflows:delete', 'Delete')) + chatflowsCategory.addPermission(new Permission('chatflows:export', 'Export')) + chatflowsCategory.addPermission(new Permission('chatflows:import', 'Import')) + chatflowsCategory.addPermission(new Permission('chatflows:config', 'Edit Configuration')) + chatflowsCategory.addPermission(new Permission('chatflows:domains', 'Allowed Domains')) + this.categories.push(chatflowsCategory) + + const agentflowsCategory = new PermissionCategory('agentflows') + agentflowsCategory.addPermission(new Permission('agentflows:view', 'View')) + agentflowsCategory.addPermission(new Permission('agentflows:create', 'Create')) + agentflowsCategory.addPermission(new Permission('agentflows:update', 'Update')) + agentflowsCategory.addPermission(new Permission('agentflows:duplicate', 'Duplicate')) + agentflowsCategory.addPermission(new Permission('agentflows:delete', 'Delete')) + agentflowsCategory.addPermission(new Permission('agentflows:export', 'Export')) + agentflowsCategory.addPermission(new Permission('agentflows:import', 'Import')) + 
agentflowsCategory.addPermission(new Permission('agentflows:config', 'Edit Configuration')) + agentflowsCategory.addPermission(new Permission('agentflows:domains', 'Allowed Domains')) + this.categories.push(agentflowsCategory) + + const toolsCategory = new PermissionCategory('tools') + toolsCategory.addPermission(new Permission('tools:view', 'View')) + toolsCategory.addPermission(new Permission('tools:create', 'Create')) + toolsCategory.addPermission(new Permission('tools:update', 'Update')) + toolsCategory.addPermission(new Permission('tools:delete', 'Delete')) + toolsCategory.addPermission(new Permission('tools:export', 'Export')) + this.categories.push(toolsCategory) + + const assistantsCategory = new PermissionCategory('assistants') + assistantsCategory.addPermission(new Permission('assistants:view', 'View')) + assistantsCategory.addPermission(new Permission('assistants:create', 'Create')) + assistantsCategory.addPermission(new Permission('assistants:update', 'Update')) + assistantsCategory.addPermission(new Permission('assistants:delete', 'Delete')) + this.categories.push(assistantsCategory) + + const credentialsCategory = new PermissionCategory('credentials') + credentialsCategory.addPermission(new Permission('credentials:view', 'View')) + credentialsCategory.addPermission(new Permission('credentials:create', 'Create')) + credentialsCategory.addPermission(new Permission('credentials:update', 'Update')) + credentialsCategory.addPermission(new Permission('credentials:delete', 'Delete')) + credentialsCategory.addPermission(new Permission('credentials:share', 'Share')) + this.categories.push(credentialsCategory) + + const variablesCategory = new PermissionCategory('variables') + variablesCategory.addPermission(new Permission('variables:view', 'View')) + variablesCategory.addPermission(new Permission('variables:create', 'Create')) + variablesCategory.addPermission(new Permission('variables:update', 'Update')) + variablesCategory.addPermission(new 
Permission('variables:delete', 'Delete')) + this.categories.push(variablesCategory) + + const apikeysCategory = new PermissionCategory('apikeys') + apikeysCategory.addPermission(new Permission('apikeys:view', 'View')) + apikeysCategory.addPermission(new Permission('apikeys:create', 'Create')) + apikeysCategory.addPermission(new Permission('apikeys:update', 'Update')) + apikeysCategory.addPermission(new Permission('apikeys:delete', 'Delete')) + apikeysCategory.addPermission(new Permission('apikeys:import', 'Import')) + this.categories.push(apikeysCategory) + + const documentStoresCategory = new PermissionCategory('documentStores') + documentStoresCategory.addPermission(new Permission('documentStores:view', 'View')) + documentStoresCategory.addPermission(new Permission('documentStores:create', 'Create')) + documentStoresCategory.addPermission(new Permission('documentStores:update', 'Update')) + documentStoresCategory.addPermission(new Permission('documentStores:delete', 'Delete Document Store')) + documentStoresCategory.addPermission(new Permission('documentStores:add-loader', 'Add Document Loader')) + documentStoresCategory.addPermission(new Permission('documentStores:delete-loader', 'Delete Document Loader')) + documentStoresCategory.addPermission(new Permission('documentStores:preview-process', 'Preview & Process Document Chunks')) + documentStoresCategory.addPermission(new Permission('documentStores:upsert-config', 'Upsert Config')) + this.categories.push(documentStoresCategory) + + const datasetsCategory = new PermissionCategory('datasets') + datasetsCategory.addPermission(new Permission('datasets:view', 'View')) + datasetsCategory.addPermission(new Permission('datasets:create', 'Create')) + datasetsCategory.addPermission(new Permission('datasets:update', 'Update')) + datasetsCategory.addPermission(new Permission('datasets:delete', 'Delete')) + this.categories.push(datasetsCategory) + + const executionsCategory = new PermissionCategory('executions') + 
executionsCategory.addPermission(new Permission('executions:view', 'View')) + executionsCategory.addPermission(new Permission('executions:delete', 'Delete')) + this.categories.push(executionsCategory) + + const evaluatorsCategory = new PermissionCategory('evaluators') + evaluatorsCategory.addPermission(new Permission('evaluators:view', 'View')) + evaluatorsCategory.addPermission(new Permission('evaluators:create', 'Create')) + evaluatorsCategory.addPermission(new Permission('evaluators:update', 'Update')) + evaluatorsCategory.addPermission(new Permission('evaluators:delete', 'Delete')) + this.categories.push(evaluatorsCategory) + + const evaluationsCategory = new PermissionCategory('evaluations') + evaluationsCategory.addPermission(new Permission('evaluations:view', 'View')) + evaluationsCategory.addPermission(new Permission('evaluations:create', 'Create')) + evaluationsCategory.addPermission(new Permission('evaluations:update', 'Update')) + evaluationsCategory.addPermission(new Permission('evaluations:delete', 'Delete')) + evaluationsCategory.addPermission(new Permission('evaluations:run', 'Run Again')) + this.categories.push(evaluationsCategory) + + const templatesCategory = new PermissionCategory('templates') + templatesCategory.addPermission(new Permission('templates:marketplace', 'View Marketplace Templates')) + templatesCategory.addPermission(new Permission('templates:custom', 'View Custom Templates')) + templatesCategory.addPermission(new Permission('templates:custom-delete', 'Delete Custom Template')) + templatesCategory.addPermission(new Permission('templates:toolexport', 'Export Tool as Template')) + templatesCategory.addPermission(new Permission('templates:flowexport', 'Export Flow as Template')) + templatesCategory.addPermission(new Permission('templates:custom-share', 'Share Custom Templates')) + this.categories.push(templatesCategory) + + const workspaceCategory = new PermissionCategory('workspace') + workspaceCategory.addPermission(new 
Permission('workspace:view', 'View')) + workspaceCategory.addPermission(new Permission('workspace:create', 'Create')) + workspaceCategory.addPermission(new Permission('workspace:update', 'Update')) + workspaceCategory.addPermission(new Permission('workspace:add-user', 'Add User')) + workspaceCategory.addPermission(new Permission('workspace:unlink-user', 'Remove User')) + workspaceCategory.addPermission(new Permission('workspace:delete', 'Delete')) + workspaceCategory.addPermission(new Permission('workspace:export', 'Export Data within Workspace')) + workspaceCategory.addPermission(new Permission('workspace:import', 'Import Data within Workspace')) + this.categories.push(workspaceCategory) + + const adminCategory = new PermissionCategory('admin') + adminCategory.addPermission(new Permission('users:manage', 'Manage Users')) + adminCategory.addPermission(new Permission('roles:manage', 'Manage Roles')) + adminCategory.addPermission(new Permission('sso:manage', 'Manage SSO')) + this.categories.push(adminCategory) + + const logsCategory = new PermissionCategory('logs') + logsCategory.addPermission(new Permission('logs:view', 'View Logs', true)) + this.categories.push(logsCategory) + + const loginActivityCategory = new PermissionCategory('loginActivity') + loginActivityCategory.addPermission(new Permission('loginActivity:view', 'View Login Activity', true)) + loginActivityCategory.addPermission(new Permission('loginActivity:delete', 'Delete Login Activity', true)) + this.categories.push(loginActivityCategory) + } + + public toJSON(): { [key: string]: { key: string; value: string }[] } { + return this.categories.reduce((acc, category) => { + return { + ...acc, + ...category.toJSON() + } + }, {}) + } +} + +export class PermissionCategory { + public permissions: any[] = [] + + constructor(public category: string) {} + + addPermission(permission: Permission) { + this.permissions.push(permission) + } + public toJSON() { + return { + [this.category]: 
[...this.permissions.map((permission) => permission.toJSON())] + } + } +} + +export class Permission { + constructor(public name: string, public description: string, public isEnterprise: boolean = false) {} + + public toJSON() { + return { + key: this.name, + value: this.description, + isEnterprise: this.isEnterprise + } + } +} diff --git a/packages/server/src/enterprise/routes/account.route.ts b/packages/server/src/enterprise/routes/account.route.ts new file mode 100644 index 000000000..57dbdce07 --- /dev/null +++ b/packages/server/src/enterprise/routes/account.route.ts @@ -0,0 +1,37 @@ +import express from 'express' +import { AccountController } from '../controllers/account.controller' +import { IdentityManager } from '../../IdentityManager' +import { checkAnyPermission } from '../rbac/PermissionCheck' + +const router = express.Router() +const accountController = new AccountController() + +router.post('/register', accountController.register) + +// feature flag to workspace since only user who has workspaces can invite +router.post( + '/invite', + IdentityManager.checkFeatureByPlan('feat:workspaces'), + checkAnyPermission('workspace:add-user,users:manage'), + accountController.invite +) + +router.post('/login', accountController.login) + +router.post('/logout', accountController.logout) + +router.post('/verify', accountController.verify) + +router.post('/resend-verification', accountController.resendVerificationEmail) + +router.post('/forgot-password', accountController.forgotPassword) + +router.post('/reset-password', accountController.resetPassword) + +router.post('/billing', accountController.createStripeCustomerPortalSession) + +router.get('/basic-auth', accountController.getBasicAuth) + +router.post('/basic-auth', accountController.checkBasicAuth) + +export default router diff --git a/packages/server/src/enterprise/routes/audit/index.ts b/packages/server/src/enterprise/routes/audit/index.ts new file mode 100644 index 000000000..5ddd7d547 --- /dev/null +++ 
b/packages/server/src/enterprise/routes/audit/index.ts @@ -0,0 +1,9 @@ +import express from 'express' +import auditController from '../../controllers/audit' +import { checkPermission } from '../../rbac/PermissionCheck' +const router = express.Router() + +router.post(['/', '/login-activity'], checkPermission('loginActivity:view'), auditController.fetchLoginActivity) +router.post(['/delete', '/login-activity/delete'], checkPermission('loginActivity:delete'), auditController.deleteLoginActivity) + +export default router diff --git a/packages/server/src/enterprise/routes/auth/index.ts b/packages/server/src/enterprise/routes/auth/index.ts new file mode 100644 index 000000000..494b30ccb --- /dev/null +++ b/packages/server/src/enterprise/routes/auth/index.ts @@ -0,0 +1,10 @@ +import express from 'express' +import authController from '../../controllers/auth' +const router = express.Router() + +// RBAC +router.get(['/', '/permissions'], authController.getAllPermissions) + +router.get(['/sso-success'], authController.ssoSuccess) + +export default router diff --git a/packages/server/src/enterprise/routes/login-method.route.ts b/packages/server/src/enterprise/routes/login-method.route.ts new file mode 100644 index 000000000..f1c3912e2 --- /dev/null +++ b/packages/server/src/enterprise/routes/login-method.route.ts @@ -0,0 +1,18 @@ +import express from 'express' +import { LoginMethodController } from '../controllers/login-method.controller' +import { checkPermission } from '../rbac/PermissionCheck' + +const router = express.Router() +const loginMethodController = new LoginMethodController() + +router.get('/', loginMethodController.read) + +router.get('/default', loginMethodController.defaultMethods) + +router.post('/', checkPermission('sso:manage'), loginMethodController.create) + +router.put('/', checkPermission('sso:manage'), loginMethodController.update) + +router.post('/test', checkPermission('sso:manage'), loginMethodController.testConfig) + +export default router diff --git 
a/packages/server/src/enterprise/routes/organization-user.route.ts b/packages/server/src/enterprise/routes/organization-user.route.ts new file mode 100644 index 000000000..99241756e --- /dev/null +++ b/packages/server/src/enterprise/routes/organization-user.route.ts @@ -0,0 +1,17 @@ +import express from 'express' +import { OrganizationUserController } from '../controllers/organization-user.controller' +import { checkPermission } from '../rbac/PermissionCheck' +import { IdentityManager } from '../../IdentityManager' + +const router = express.Router() +const organizationUserController = new OrganizationUserController() + +router.get('/', organizationUserController.read) + +router.post('/', IdentityManager.checkFeatureByPlan('feat:users'), checkPermission('users:manage'), organizationUserController.create) + +router.put('/', IdentityManager.checkFeatureByPlan('feat:users'), checkPermission('users:manage'), organizationUserController.update) + +router.delete('/', IdentityManager.checkFeatureByPlan('feat:users'), checkPermission('users:manage'), organizationUserController.delete) + +export default router diff --git a/packages/server/src/enterprise/routes/organization.route.ts b/packages/server/src/enterprise/routes/organization.route.ts new file mode 100644 index 000000000..52dc17c26 --- /dev/null +++ b/packages/server/src/enterprise/routes/organization.route.ts @@ -0,0 +1,27 @@ +import express from 'express' +import { OrganizationController } from '../controllers/organization.controller' + +const router = express.Router() +const organizationController = new OrganizationController() + +router.get('/', organizationController.read) + +router.post('/', organizationController.create) + +router.put('/', organizationController.update) + +router.get('/additional-seats-quantity', organizationController.getAdditionalSeatsQuantity) + +router.get('/customer-default-source', organizationController.getCustomerWithDefaultSource) + +router.get('/additional-seats-proration', 
organizationController.getAdditionalSeatsProration) + +router.post('/update-additional-seats', organizationController.updateAdditionalSeats) + +router.get('/plan-proration', organizationController.getPlanProration) + +router.post('/update-subscription-plan', organizationController.updateSubscriptionPlan) + +router.get('/get-current-usage', organizationController.getCurrentUsage) + +export default router diff --git a/packages/server/src/enterprise/routes/role.route.ts b/packages/server/src/enterprise/routes/role.route.ts new file mode 100644 index 000000000..19225ba8c --- /dev/null +++ b/packages/server/src/enterprise/routes/role.route.ts @@ -0,0 +1,16 @@ +import express from 'express' +import { RoleController } from '../controllers/role.controller' +import { checkPermission } from '../rbac/PermissionCheck' + +const router = express.Router() +const roleController = new RoleController() + +router.get('/', roleController.read) + +router.post('/', checkPermission('roles:manage'), roleController.create) + +router.put('/', checkPermission('roles:manage'), roleController.update) + +router.delete('/', checkPermission('roles:manage'), roleController.delete) + +export default router diff --git a/packages/server/src/enterprise/routes/user.route.ts b/packages/server/src/enterprise/routes/user.route.ts new file mode 100644 index 000000000..dcfc50487 --- /dev/null +++ b/packages/server/src/enterprise/routes/user.route.ts @@ -0,0 +1,14 @@ +import express from 'express' +import { UserController } from '../controllers/user.controller' + +const router = express.Router() +const userController = new UserController() + +router.get('/', userController.read) +router.get('/test', userController.test) + +router.post('/', userController.create) + +router.put('/', userController.update) + +export default router diff --git a/packages/server/src/enterprise/routes/workspace-user.route.ts b/packages/server/src/enterprise/routes/workspace-user.route.ts new file mode 100644 index 
000000000..12ec82447 --- /dev/null +++ b/packages/server/src/enterprise/routes/workspace-user.route.ts @@ -0,0 +1,33 @@ +import express from 'express' +import { WorkspaceUserController } from '../controllers/workspace-user.controller' +import { IdentityManager } from '../../IdentityManager' +import { checkPermission } from '../rbac/PermissionCheck' + +const router = express.Router() +const workspaceUserController = new WorkspaceUserController() + +// no feature flag because user with lower plan can read invited workspaces with higher plan +router.get('/', workspaceUserController.read) + +router.post( + '/', + IdentityManager.checkFeatureByPlan('feat:workspaces'), + checkPermission('workspace:add-user'), + workspaceUserController.create +) + +router.put( + '/', + IdentityManager.checkFeatureByPlan('feat:workspaces'), + checkPermission('workspace:add-user'), + workspaceUserController.update +) + +router.delete( + '/', + IdentityManager.checkFeatureByPlan('feat:workspaces'), + checkPermission('workspace:unlink-user'), + workspaceUserController.delete +) + +export default router diff --git a/packages/server/src/enterprise/routes/workspace.route.ts b/packages/server/src/enterprise/routes/workspace.route.ts new file mode 100644 index 000000000..2e27aa91b --- /dev/null +++ b/packages/server/src/enterprise/routes/workspace.route.ts @@ -0,0 +1,38 @@ +import express from 'express' +import { WorkspaceController } from '../controllers/workspace.controller' +import { IdentityManager } from '../../IdentityManager' +import { checkPermission } from '../rbac/PermissionCheck' + +const router = express.Router() +const workspaceController = new WorkspaceController() + +router.get('/', IdentityManager.checkFeatureByPlan('feat:workspaces'), checkPermission('workspace:view'), workspaceController.read) + +router.post('/', IdentityManager.checkFeatureByPlan('feat:workspaces'), checkPermission('workspace:create'), workspaceController.create) + +// no feature flag because user with lower 
plan can switch to invited workspaces with higher plan +router.post('/switch', workspaceController.switchWorkspace) + +router.put('/', IdentityManager.checkFeatureByPlan('feat:workspaces'), checkPermission('workspace:update'), workspaceController.update) + +router.delete( + ['/', '/:id'], + IdentityManager.checkFeatureByPlan('feat:workspaces'), + checkPermission('workspace:delete'), + workspaceController.delete +) + +router.get( + ['/shared', '/shared/:id'], + IdentityManager.checkFeatureByPlan('feat:workspaces'), + checkPermission('workspace:create'), + workspaceController.getSharedWorkspacesForItem +) +router.post( + ['/shared', '/shared/:id'], + IdentityManager.checkFeatureByPlan('feat:workspaces'), + checkPermission('workspace:create'), + workspaceController.setSharedWorkspacesForItem +) + +export default router diff --git a/packages/server/src/enterprise/services/account.service.ts b/packages/server/src/enterprise/services/account.service.ts new file mode 100644 index 000000000..f2894f487 --- /dev/null +++ b/packages/server/src/enterprise/services/account.service.ts @@ -0,0 +1,604 @@ +import bcrypt from 'bcryptjs' +import { StatusCodes } from 'http-status-codes' +import moment from 'moment' +import { DataSource, QueryRunner } from 'typeorm' +import { InternalFlowiseError } from '../../errors/internalFlowiseError' +import { IdentityManager } from '../../IdentityManager' +import { Platform, UserPlan } from '../../Interface' +import { GeneralErrorMessage } from '../../utils/constants' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' +import { checkUsageLimit } from '../../utils/quotaUsage' +import { OrganizationUser, OrganizationUserStatus } from '../database/entities/organization-user.entity' +import { Organization, OrganizationName } from '../database/entities/organization.entity' +import { GeneralRole, Role } from '../database/entities/role.entity' +import { User, UserStatus } from '../database/entities/user.entity' +import { 
WorkspaceUser, WorkspaceUserStatus } from '../database/entities/workspace-user.entity' +import { Workspace, WorkspaceName } from '../database/entities/workspace.entity' +import { LoggedInUser, LoginActivityCode } from '../Interface.Enterprise' +import { compareHash } from '../utils/encryption.util' +import { sendPasswordResetEmail, sendVerificationEmailForCloud, sendWorkspaceAdd, sendWorkspaceInvite } from '../utils/sendEmail' +import { generateTempToken } from '../utils/tempTokenUtils' +import auditService from './audit' +import { OrganizationUserErrorMessage, OrganizationUserService } from './organization-user.service' +import { OrganizationErrorMessage, OrganizationService } from './organization.service' +import { RoleErrorMessage, RoleService } from './role.service' +import { UserErrorMessage, UserService } from './user.service' +import { WorkspaceUserErrorMessage, WorkspaceUserService } from './workspace-user.service' +import { WorkspaceErrorMessage, WorkspaceService } from './workspace.service' +import { sanitizeUser } from '../../utils/sanitize.util' +import { destroyAllSessionsForUser } from '../middleware/passport/SessionPersistance' + +type AccountDTO = { + user: Partial<User> + organization: Partial<Organization> + organizationUser: Partial<OrganizationUser> + workspace: Partial<Workspace> + workspaceUser: Partial<WorkspaceUser> + role: Partial<Role> +} + +export class AccountService { + private dataSource: DataSource + private userService: UserService + private organizationservice: OrganizationService + private workspaceService: WorkspaceService + private roleService: RoleService + private organizationUserService: OrganizationUserService + private workspaceUserService: WorkspaceUserService + private identityManager: IdentityManager + + constructor() { + const appServer = getRunningExpressApp() + this.dataSource = appServer.AppDataSource + this.userService = new UserService() + this.organizationservice = new OrganizationService() + this.workspaceService = new WorkspaceService() + this.roleService = new RoleService() + 
this.organizationUserService = new OrganizationUserService() + this.workspaceUserService = new WorkspaceUserService() + this.identityManager = appServer.identityManager + } + + private initializeAccountDTO(data: AccountDTO) { + data.organization = data.organization || {} + data.organizationUser = data.organizationUser || {} + data.workspace = data.workspace || {} + data.workspaceUser = data.workspaceUser || {} + data.role = data.role || {} + + return data + } + + public async resendVerificationEmail({ email }: { email: string }) { + const queryRunner = this.dataSource.createQueryRunner() + await queryRunner.connect() + try { + await queryRunner.startTransaction() + + const user = await this.userService.readUserByEmail(email, queryRunner) + if (!user) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + if (user && user.status === UserStatus.ACTIVE) + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_EMAIL_ALREADY_EXISTS) + + if (!user.email) throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, UserErrorMessage.INVALID_USER_EMAIL) + + const updateUserData: Partial<User> = {} + updateUserData.tempToken = generateTempToken() + const tokenExpiry = new Date() + const expiryInHours = process.env.INVITE_TOKEN_EXPIRY_IN_HOURS ? 
parseInt(process.env.INVITE_TOKEN_EXPIRY_IN_HOURS) : 24 + tokenExpiry.setHours(tokenExpiry.getHours() + expiryInHours) + updateUserData.tokenExpiry = tokenExpiry + + // Update user with new token and expiry + const updatedUser = queryRunner.manager.merge(User, user, updateUserData) + await queryRunner.manager.save(User, updatedUser) + + // resend invite + const verificationLink = `${process.env.APP_URL}/verify?token=${updateUserData.tempToken}` + await sendVerificationEmailForCloud(email, verificationLink) + + await queryRunner.commitTransaction() + } catch (error) { + await queryRunner.rollbackTransaction() + throw error + } finally { + await queryRunner.release() + } + } + + private async ensureOneOrganizationOnly(queryRunner: QueryRunner) { + const organizations = await this.organizationservice.readOrganization(queryRunner) + if (organizations.length > 0) throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, 'You can only have one organization') + } + + private async createRegisterAccount(data: AccountDTO, queryRunner: QueryRunner) { + data = this.initializeAccountDTO(data) + + const platform = this.identityManager.getPlatformType() + + switch (platform) { + case Platform.OPEN_SOURCE: + await this.ensureOneOrganizationOnly(queryRunner) + data.organization.name = OrganizationName.DEFAULT_ORGANIZATION + data.organizationUser.role = await this.roleService.readGeneralRoleByName(GeneralRole.OWNER, queryRunner) + data.workspace.name = WorkspaceName.DEFAULT_WORKSPACE + data.workspaceUser.role = data.organizationUser.role + data.user.status = UserStatus.ACTIVE + data.user = await this.userService.createNewUser(data.user, queryRunner) + break + case Platform.CLOUD: { + const user = await this.userService.readUserByEmail(data.user.email, queryRunner) + if (user && (user.status === UserStatus.ACTIVE || user.status === UserStatus.UNVERIFIED)) + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_EMAIL_ALREADY_EXISTS) + + if (!data.user.email) throw 
new InternalFlowiseError(StatusCodes.BAD_REQUEST, UserErrorMessage.INVALID_USER_EMAIL) + const { customerId, subscriptionId } = await this.identityManager.createStripeUserAndSubscribe({ + email: data.user.email, + userPlan: UserPlan.FREE, + // @ts-ignore + referral: data.user.referral || '' + }) + data.organization.customerId = customerId + data.organization.subscriptionId = subscriptionId + + // if credential exists then the user is signing up with email/password + // if not then the user is signing up with oauth/sso + if (data.user.credential) { + data.user.status = UserStatus.UNVERIFIED + data.user.tempToken = generateTempToken() + const tokenExpiry = new Date() + const expiryInHours = process.env.INVITE_TOKEN_EXPIRY_IN_HOURS ? parseInt(process.env.INVITE_TOKEN_EXPIRY_IN_HOURS) : 24 + tokenExpiry.setHours(tokenExpiry.getHours() + expiryInHours) + data.user.tokenExpiry = tokenExpiry + } else { + data.user.status = UserStatus.ACTIVE + data.user.tempToken = '' + data.user.tokenExpiry = null + } + data.organization.name = OrganizationName.DEFAULT_ORGANIZATION + data.organizationUser.role = await this.roleService.readGeneralRoleByName(GeneralRole.OWNER, queryRunner) + data.workspace.name = WorkspaceName.DEFAULT_WORKSPACE + data.workspaceUser.role = data.organizationUser.role + if (!user) { + data.user = await this.userService.createNewUser(data.user, queryRunner) + } else { + if (data.user.credential) data.user.credential = this.userService.encryptUserCredential(data.user.credential) + data.user.updatedBy = user.id + data.user = queryRunner.manager.merge(User, user, data.user) + } + // send verification email only if user signed up with email/password + if (data.user.credential) { + const verificationLink = `${process.env.APP_URL}/verify?token=${data.user.tempToken}` + await sendVerificationEmailForCloud(data.user.email!, verificationLink) + } + break + } + case Platform.ENTERPRISE: { + if (data.user.tempToken) { + const user = await 
this.userService.readUserByToken(data.user.tempToken, queryRunner) + if (!user) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + if (user.email.toLowerCase() !== data.user.email?.toLowerCase()) + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, UserErrorMessage.INVALID_USER_EMAIL) + const name = data.user.name + if (data.user.credential) user.credential = this.userService.encryptUserCredential(data.user.credential) + data.user = user + const organizationUser = await this.organizationUserService.readOrganizationUserByUserId(user.id, queryRunner) + if (!organizationUser) + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, OrganizationUserErrorMessage.ORGANIZATION_USER_NOT_FOUND) + const assignedOrganization = await this.organizationservice.readOrganizationById( + organizationUser[0].organizationId, + queryRunner + ) + if (!assignedOrganization) + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, OrganizationErrorMessage.ORGANIZATION_NOT_FOUND) + data.organization = assignedOrganization + const tokenExpiry = new Date(user.tokenExpiry!) 
+ const today = new Date() + if (today > tokenExpiry) throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, UserErrorMessage.EXPIRED_TEMP_TOKEN) + data.user.tempToken = '' + data.user.tokenExpiry = null + data.user.name = name + data.user.status = UserStatus.ACTIVE + data.organizationUser.status = OrganizationUserStatus.ACTIVE + data.organizationUser.role = await this.roleService.readGeneralRoleByName(GeneralRole.MEMBER, queryRunner) + data.workspace.name = WorkspaceName.DEFAULT_PERSONAL_WORKSPACE + data.workspaceUser.role = await this.roleService.readGeneralRoleByName(GeneralRole.PERSONAL_WORKSPACE, queryRunner) + } else { + await this.ensureOneOrganizationOnly(queryRunner) + data.organizationUser.role = await this.roleService.readGeneralRoleByName(GeneralRole.OWNER, queryRunner) + data.workspace.name = WorkspaceName.DEFAULT_WORKSPACE + data.workspaceUser.role = data.organizationUser.role + data.user.status = UserStatus.ACTIVE + data.user = await this.userService.createNewUser(data.user, queryRunner) + } + break + } + default: + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, GeneralErrorMessage.UNHANDLED_EDGE_CASE) + } + + if (!data.organization.id) { + data.organization.createdBy = data.user.createdBy + data.organization = this.organizationservice.createNewOrganization(data.organization, queryRunner, true) + } + data.organizationUser.organizationId = data.organization.id + data.organizationUser.userId = data.user.id + data.organizationUser.createdBy = data.user.createdBy + data.organizationUser = this.organizationUserService.createNewOrganizationUser(data.organizationUser, queryRunner) + data.workspace.organizationId = data.organization.id + data.workspace.createdBy = data.user.createdBy + data.workspace = this.workspaceService.createNewWorkspace(data.workspace, queryRunner, true) + data.workspaceUser.workspaceId = data.workspace.id + data.workspaceUser.userId = data.user.id + data.workspaceUser.createdBy = data.user.createdBy + 
data.workspaceUser.status = WorkspaceUserStatus.ACTIVE + data.workspaceUser = this.workspaceUserService.createNewWorkspaceUser(data.workspaceUser, queryRunner) + + return data + } + + private async saveRegisterAccount(data: AccountDTO) { + const queryRunner = this.dataSource.createQueryRunner() + await queryRunner.connect() + const platform = this.identityManager.getPlatformType() + const ownerRole = await this.roleService.readGeneralRoleByName(GeneralRole.OWNER, queryRunner) + + try { + data = await this.createRegisterAccount(data, queryRunner) + + await queryRunner.startTransaction() + data.user = await this.userService.saveUser(data.user, queryRunner) + data.organization = await this.organizationservice.saveOrganization(data.organization, queryRunner) + data.organizationUser = await this.organizationUserService.saveOrganizationUser(data.organizationUser, queryRunner) + data.workspace = await this.workspaceService.saveWorkspace(data.workspace, queryRunner) + data.workspaceUser = await this.workspaceUserService.saveWorkspaceUser(data.workspaceUser, queryRunner) + if ( + data.workspace.id && + (platform === Platform.OPEN_SOURCE || platform === Platform.ENTERPRISE) && + ownerRole.id === data.organizationUser.roleId + ) { + await this.workspaceService.setNullWorkspaceId(queryRunner, data.workspace.id) + } + await queryRunner.commitTransaction() + + delete data.user.credential + delete data.user.tempToken + delete data.user.tokenExpiry + + return data + } catch (error) { + if (queryRunner && queryRunner.isTransactionActive) await queryRunner.rollbackTransaction() + throw error + } finally { + if (queryRunner && !queryRunner.isReleased) await queryRunner.release() + } + } + + public async register(data: AccountDTO) { + return await this.saveRegisterAccount(data) + } + + private async saveInviteAccount(data: AccountDTO, currentUser?: Express.User) { + data = this.initializeAccountDTO(data) + const queryRunner = this.dataSource.createQueryRunner() + await 
queryRunner.connect() + + try { + const workspace = await this.workspaceService.readWorkspaceById(data.workspace.id, queryRunner) + if (!workspace) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, WorkspaceErrorMessage.WORKSPACE_NOT_FOUND) + data.workspace = workspace + + const totalOrgUsers = await this.organizationUserService.readOrgUsersCountByOrgId(data.workspace.organizationId || '') + const subscriptionId = currentUser?.activeOrganizationSubscriptionId || '' + + const role = await this.roleService.readRoleByRoleIdOrganizationId(data.role.id, data.workspace.organizationId, queryRunner) + if (!role) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, RoleErrorMessage.ROLE_NOT_FOUND) + data.role = role + const user = await this.userService.readUserByEmail(data.user.email, queryRunner) + if (!user) { + await checkUsageLimit('users', subscriptionId, getRunningExpressApp().usageCacheManager, totalOrgUsers + 1) + + // generate a temporary token + data.user.tempToken = generateTempToken() + const tokenExpiry = new Date() + // set expiry based on env setting and fallback to 24 hours + const expiryInHours = process.env.INVITE_TOKEN_EXPIRY_IN_HOURS ? parseInt(process.env.INVITE_TOKEN_EXPIRY_IN_HOURS) : 24 + tokenExpiry.setHours(tokenExpiry.getHours() + expiryInHours) + data.user.tokenExpiry = tokenExpiry + data.user.status = UserStatus.INVITED + // send invite + const registerLink = + this.identityManager.getPlatformType() === Platform.ENTERPRISE + ? 
`${process.env.APP_URL}/register?token=${data.user.tempToken}` + : `${process.env.APP_URL}/register` + await sendWorkspaceInvite(data.user.email!, data.workspace.name!, registerLink, this.identityManager.getPlatformType()) + data.user = await this.userService.createNewUser(data.user, queryRunner) + + data.organizationUser.organizationId = data.workspace.organizationId + data.organizationUser.userId = data.user.id + const roleMember = await this.roleService.readGeneralRoleByName(GeneralRole.MEMBER, queryRunner) + data.organizationUser.roleId = roleMember.id + data.organizationUser.createdBy = data.user.createdBy + data.organizationUser.status = OrganizationUserStatus.INVITED + data.organizationUser = await this.organizationUserService.createNewOrganizationUser(data.organizationUser, queryRunner) + + workspace.updatedBy = data.user.createdBy + + data.workspaceUser.workspaceId = data.workspace.id + data.workspaceUser.userId = data.user.id + data.workspaceUser.roleId = data.role.id + data.workspaceUser.createdBy = data.user.createdBy + data.workspaceUser.status = WorkspaceUserStatus.INVITED + data.workspaceUser = await this.workspaceUserService.createNewWorkspaceUser(data.workspaceUser, queryRunner) + + await queryRunner.startTransaction() + data.user = await this.userService.saveUser(data.user, queryRunner) + await this.workspaceService.saveWorkspace(workspace, queryRunner) + data.organizationUser = await this.organizationUserService.saveOrganizationUser(data.organizationUser, queryRunner) + data.workspaceUser = await this.workspaceUserService.saveWorkspaceUser(data.workspaceUser, queryRunner) + data.role = await this.roleService.saveRole(data.role, queryRunner) + await queryRunner.commitTransaction() + delete data.user.credential + delete data.user.tempToken + delete data.user.tokenExpiry + + return data + } + const { organizationUser } = await this.organizationUserService.readOrganizationUserByOrganizationIdUserId( + data.workspace.organizationId, + user.id, + 
queryRunner + ) + if (!organizationUser) { + await checkUsageLimit('users', subscriptionId, getRunningExpressApp().usageCacheManager, totalOrgUsers + 1) + data.organizationUser.organizationId = data.workspace.organizationId + data.organizationUser.userId = user.id + const roleMember = await this.roleService.readGeneralRoleByName(GeneralRole.MEMBER, queryRunner) + data.organizationUser.roleId = roleMember.id + data.organizationUser.createdBy = data.user.createdBy + data.organizationUser.status = OrganizationUserStatus.INVITED + data.organizationUser = await this.organizationUserService.createNewOrganizationUser(data.organizationUser, queryRunner) + } else { + data.organizationUser = organizationUser + } + + let oldWorkspaceUser + if (data.organizationUser.status === OrganizationUserStatus.INVITED) { + const workspaceUser = await this.workspaceUserService.readWorkspaceUserByOrganizationIdUserId( + data.workspace.organizationId, + user.id, + queryRunner + ) + let registerLink: string + if (this.identityManager.getPlatformType() === Platform.ENTERPRISE) { + data.user = user + data.user.tempToken = generateTempToken() + const tokenExpiry = new Date() + const expiryInHours = process.env.INVITE_TOKEN_EXPIRY_IN_HOURS ? 
parseInt(process.env.INVITE_TOKEN_EXPIRY_IN_HOURS) : 24 + tokenExpiry.setHours(tokenExpiry.getHours() + expiryInHours) + data.user.tokenExpiry = tokenExpiry + await this.userService.saveUser(data.user, queryRunner) + registerLink = `${process.env.APP_URL}/register?token=${data.user.tempToken}` + } else { + registerLink = `${process.env.APP_URL}/register` + } + if (workspaceUser.length === 1) { + oldWorkspaceUser = workspaceUser[0] + if (oldWorkspaceUser.workspace.name === WorkspaceName.DEFAULT_PERSONAL_WORKSPACE) { + await sendWorkspaceInvite( + data.user.email!, + data.workspace.name!, + registerLink, + this.identityManager.getPlatformType() + ) + } else { + await sendWorkspaceInvite( + data.user.email!, + data.workspace.name!, + registerLink, + this.identityManager.getPlatformType(), + 'update' + ) + } + } else { + await sendWorkspaceInvite(data.user.email!, data.workspace.name!, registerLink, this.identityManager.getPlatformType()) + } + } else { + data.organizationUser.updatedBy = data.user.createdBy + + const dashboardLink = `${process.env.APP_URL}` + await sendWorkspaceAdd(data.user.email!, data.workspace.name!, dashboardLink) + } + + workspace.updatedBy = data.user.createdBy + + data.workspaceUser.workspaceId = data.workspace.id + data.workspaceUser.userId = user.id + data.workspaceUser.roleId = data.role.id + data.workspaceUser.createdBy = data.user.createdBy + data.workspaceUser.status = WorkspaceUserStatus.INVITED + data.workspaceUser = await this.workspaceUserService.createNewWorkspaceUser(data.workspaceUser, queryRunner) + + const personalWorkspaceRole = await this.roleService.readGeneralRoleByName(GeneralRole.PERSONAL_WORKSPACE, queryRunner) + if (oldWorkspaceUser && oldWorkspaceUser.roleId !== personalWorkspaceRole.id) { + await this.workspaceUserService.deleteWorkspaceUser(oldWorkspaceUser.workspaceId, user.id) + } + + await queryRunner.startTransaction() + data.organizationUser = await 
this.organizationUserService.saveOrganizationUser(data.organizationUser, queryRunner) + await this.workspaceService.saveWorkspace(workspace, queryRunner) + data.workspaceUser = await this.workspaceUserService.saveWorkspaceUser(data.workspaceUser, queryRunner) + data.role = await this.roleService.saveRole(data.role, queryRunner) + await queryRunner.commitTransaction() + + return data + } catch (error) { + if (queryRunner && queryRunner.isTransactionActive) await queryRunner.rollbackTransaction() + throw error + } finally { + if (queryRunner && !queryRunner.isReleased) await queryRunner.release() + } + } + + public async invite(data: AccountDTO, user?: Express.User) { + return await this.saveInviteAccount(data, user) + } + + public async login(data: AccountDTO) { + data = this.initializeAccountDTO(data) + const queryRunner = this.dataSource.createQueryRunner() + await queryRunner.connect() + const platform = this.identityManager.getPlatformType() + try { + if (!data.user.credential) { + await auditService.recordLoginActivity(data.user.email || '', LoginActivityCode.INCORRECT_CREDENTIAL, 'Login Failed') + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, UserErrorMessage.INVALID_USER_CREDENTIAL) + } + const user = await this.userService.readUserByEmail(data.user.email, queryRunner) + if (!user) { + await auditService.recordLoginActivity(data.user.email || '', LoginActivityCode.UNKNOWN_USER, 'Login Failed') + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + } + if (!user.credential) { + await auditService.recordLoginActivity(user.email || '', LoginActivityCode.INCORRECT_CREDENTIAL, 'Login Failed') + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, UserErrorMessage.INVALID_USER_CREDENTIAL) + } + if (!compareHash(data.user.credential, user.credential)) { + await auditService.recordLoginActivity(user.email || '', LoginActivityCode.INCORRECT_CREDENTIAL, 'Login Failed') + throw new 
InternalFlowiseError(StatusCodes.UNAUTHORIZED, UserErrorMessage.INCORRECT_USER_EMAIL_OR_CREDENTIALS) + } + if (user.status === UserStatus.UNVERIFIED) { + await auditService.recordLoginActivity(data.user.email || '', LoginActivityCode.REGISTRATION_PENDING, 'Login Failed') + throw new InternalFlowiseError(StatusCodes.UNAUTHORIZED, UserErrorMessage.USER_EMAIL_UNVERIFIED) + } + let wsUserOrUsers = await this.workspaceUserService.readWorkspaceUserByLastLogin(user.id, queryRunner) + if (Array.isArray(wsUserOrUsers)) { + if (wsUserOrUsers.length > 0) { + wsUserOrUsers = wsUserOrUsers[0] + } else { + await auditService.recordLoginActivity(user.email || '', LoginActivityCode.NO_ASSIGNED_WORKSPACE, 'Login Failed') + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, WorkspaceUserErrorMessage.WORKSPACE_USER_NOT_FOUND) + } + } + if (platform === Platform.ENTERPRISE) { + await auditService.recordLoginActivity(user.email, LoginActivityCode.LOGIN_SUCCESS, 'Login Success') + } + return { user, workspaceDetails: wsUserOrUsers } + } finally { + await queryRunner.release() + } + } + + public async verify(data: AccountDTO) { + data = this.initializeAccountDTO(data) + const queryRunner = this.dataSource.createQueryRunner() + await queryRunner.connect() + try { + await queryRunner.startTransaction() + if (!data.user.tempToken) throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, UserErrorMessage.INVALID_TEMP_TOKEN) + const user = await this.userService.readUserByToken(data.user.tempToken, queryRunner) + if (!user) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + data.user = user + data.user.tempToken = '' + data.user.tokenExpiry = null + data.user.status = UserStatus.ACTIVE + data.user = await this.userService.saveUser(data.user, queryRunner) + await queryRunner.commitTransaction() + } catch (error) { + await queryRunner.rollbackTransaction() + throw error + } finally { + await queryRunner.release() + } + + return data + } + + public async 
forgotPassword(data: AccountDTO) { + data = this.initializeAccountDTO(data) + const queryRunner = this.dataSource.createQueryRunner() + await queryRunner.connect() + try { + await queryRunner.startTransaction() + const user = await this.userService.readUserByEmail(data.user.email, queryRunner) + if (!user) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + + data.user = user + data.user.tempToken = generateTempToken() + const tokenExpiry = new Date() + const expiryInMins = process.env.PASSWORD_RESET_TOKEN_EXPIRY_IN_MINUTES + ? parseInt(process.env.PASSWORD_RESET_TOKEN_EXPIRY_IN_MINUTES) + : 15 + tokenExpiry.setMinutes(tokenExpiry.getMinutes() + expiryInMins) + data.user.tokenExpiry = tokenExpiry + data.user = await this.userService.saveUser(data.user, queryRunner) + const resetLink = `${process.env.APP_URL}/reset-password?token=${data.user.tempToken}` + await sendPasswordResetEmail(data.user.email!, resetLink) + await queryRunner.commitTransaction() + } catch (error) { + await queryRunner.rollbackTransaction() + throw error + } finally { + await queryRunner.release() + } + + return sanitizeUser(data.user) + } + + public async resetPassword(data: AccountDTO) { + data = this.initializeAccountDTO(data) + const queryRunner = this.dataSource.createQueryRunner() + await queryRunner.connect() + try { + const user = await this.userService.readUserByEmail(data.user.email, queryRunner) + if (!user) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + if (user.tempToken !== data.user.tempToken) + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, UserErrorMessage.INVALID_TEMP_TOKEN) + + const tokenExpiry = user.tokenExpiry + const now = moment() + const expiryInMins = process.env.PASSWORD_RESET_TOKEN_EXPIRY_IN_MINUTES + ? 
parseInt(process.env.PASSWORD_RESET_TOKEN_EXPIRY_IN_MINUTES) + : 15 + const diff = now.diff(tokenExpiry, 'minutes') + if (Math.abs(diff) > expiryInMins) throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, UserErrorMessage.EXPIRED_TEMP_TOKEN) + + // all checks are done, now update the user password, don't forget to hash it and do not forget to clear the temp token + // leave the user status and other details as is + const salt = bcrypt.genSaltSync(parseInt(process.env.PASSWORD_SALT_HASH_ROUNDS || '5')) + // @ts-ignore + const hash = bcrypt.hashSync(data.user.password, salt) + data.user = user + data.user.credential = hash + data.user.tempToken = '' + data.user.tokenExpiry = undefined + data.user.status = UserStatus.ACTIVE + + await queryRunner.startTransaction() + data.user = await this.userService.saveUser(data.user, queryRunner) + await queryRunner.commitTransaction() + + // Invalidate all sessions for this user after password reset + await destroyAllSessionsForUser(user.id as string) + } catch (error) { + await queryRunner.rollbackTransaction() + throw error + } finally { + await queryRunner.release() + } + + return sanitizeUser(data.user) + } + + public async logout(user: LoggedInUser) { + const platform = this.identityManager.getPlatformType() + if (platform === Platform.ENTERPRISE) { + await auditService.recordLoginActivity( + user.email, + LoginActivityCode.LOGOUT_SUCCESS, + 'Logout Success', + user.ssoToken ? 
'SSO' : 'Email/Password' + ) + } + } +} diff --git a/packages/server/src/enterprise/services/audit/index.ts b/packages/server/src/enterprise/services/audit/index.ts new file mode 100644 index 000000000..a25204247 --- /dev/null +++ b/packages/server/src/enterprise/services/audit/index.ts @@ -0,0 +1,109 @@ +import { getRunningExpressApp } from '../../../utils/getRunningExpressApp' +import { LoginActivity } from '../../database/entities/EnterpriseEntities' +import { InternalFlowiseError } from '../../../errors/internalFlowiseError' +import { StatusCodes } from 'http-status-codes' +import { getErrorMessage } from '../../../errors/utils' +import { Between, In } from 'typeorm' +import { LoginActivityCode } from '../../Interface.Enterprise' +import { Platform } from '../../../Interface' + +const PAGE_SIZE = 10 + +const aMonthAgo = () => { + const date = new Date() + date.setMonth(new Date().getMonth() - 1) + return date +} + +const setDateToStartOrEndOfDay = (dateTimeStr: string, setHours: 'start' | 'end') => { + const date = new Date(dateTimeStr) + if (isNaN(date.getTime())) { + return undefined + } + setHours === 'start' ? date.setHours(0, 0, 0, 0) : date.setHours(23, 59, 59, 999) + return date +} + +const fetchLoginActivity = async (body: any) => { + try { + const page = body.pageNo ? parseInt(body.pageNo) : 1 + const skip = (page - 1) * PAGE_SIZE + const take = PAGE_SIZE + const appServer = getRunningExpressApp() + + let fromDate + if (body.startDate) fromDate = setDateToStartOrEndOfDay(body.startDate, 'start') + + let toDate + if (body.endDate) toDate = setDateToStartOrEndOfDay(body.endDate, 'end') + + const whereCondition: any = { + attemptedDateTime: Between(fromDate ?? aMonthAgo(), toDate ?? 
new Date()) + } + if (body.activityCodes && body.activityCodes?.length > 0) { + whereCondition['activityCode'] = In(body.activityCodes) + } + const count = await appServer.AppDataSource.getRepository(LoginActivity).count({ + where: whereCondition + }) + const pagedResults = await appServer.AppDataSource.getRepository(LoginActivity).find({ + where: whereCondition, + order: { + attemptedDateTime: 'DESC' + }, + skip, + take + }) + return { + data: pagedResults, + count: count, + currentPage: page, + pageSize: PAGE_SIZE + } + } catch (error) { + throw new InternalFlowiseError( + StatusCodes.INTERNAL_SERVER_ERROR, + `Error: auditService.getLoginActivity - ${getErrorMessage(error)}` + ) + } +} + +const recordLoginActivity = async (username: string, activityCode: LoginActivityCode, message: string, ssoProvider?: string) => { + try { + const appServer = getRunningExpressApp() + const platform = appServer.identityManager.getPlatformType() + if (platform !== Platform.ENTERPRISE) { + return + } + const loginMode = ssoProvider ?? 
'Email/Password' + const loginActivity = appServer.AppDataSource.getRepository(LoginActivity).create({ + username, + activityCode, + message, + loginMode + }) + const result = await appServer.AppDataSource.getRepository(LoginActivity).save(loginActivity) + return result + } catch (error) { + throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `Error: authService.loginActivity - ${getErrorMessage(error)}`) + } +} + +const deleteLoginActivity = async (body: any) => { + try { + const appServer = getRunningExpressApp() + + await appServer.AppDataSource.getRepository(LoginActivity).delete({ + id: In(body.selected) + }) + return 'OK' + } catch (error) { + throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `Error: authService.loginActivity - ${getErrorMessage(error)}`) + } +} + +export default { + recordLoginActivity, + deleteLoginActivity, + fetchLoginActivity +} diff --git a/packages/server/src/enterprise/services/login-method.service.ts b/packages/server/src/enterprise/services/login-method.service.ts new file mode 100644 index 000000000..5523f4191 --- /dev/null +++ b/packages/server/src/enterprise/services/login-method.service.ts @@ -0,0 +1,184 @@ +import { DataSource, QueryRunner } from 'typeorm' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' +import { isInvalidName, isInvalidUUID } from '../utils/validation.util' +import { InternalFlowiseError } from '../../errors/internalFlowiseError' +import { StatusCodes } from 'http-status-codes' +import { LoginMethod, LoginMethodStatus } from '../database/entities/login-method.entity' +import { decrypt, encrypt } from '../utils/encryption.util' +import { UserErrorMessage, UserService } from './user.service' +import { OrganizationErrorMessage, OrganizationService } from './organization.service' +import { IsNull } from 'typeorm' + +export const enum LoginMethodErrorMessage { + INVALID_LOGIN_METHOD_ID = 'Invalid Login Method Id', + INVALID_LOGIN_METHOD_NAME = 'Invalid Login 
Method Name', + INVALID_LOGIN_METHOD_STATUS = 'Invalid Login Method Status', + INVALID_LOGIN_METHOD_CONFIG = 'Invalid Login Method Config', + LOGIN_METHOD_NOT_FOUND = 'Login Method Not Found' +} + +export class LoginMethodService { + private dataSource: DataSource + private userService: UserService + private organizationService: OrganizationService + + constructor() { + const appServer = getRunningExpressApp() + this.dataSource = appServer.AppDataSource + this.userService = new UserService() + this.organizationService = new OrganizationService() + } + + public validateLoginMethodId(id: string | undefined) { + if (isInvalidUUID(id)) throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, LoginMethodErrorMessage.INVALID_LOGIN_METHOD_ID) + } + + public async readLoginMethodById(id: string | undefined, queryRunner: QueryRunner) { + this.validateLoginMethodId(id) + return await queryRunner.manager.findOneBy(LoginMethod, { id }) + } + + public validateLoginMethodName(name: string | undefined) { + if (isInvalidName(name)) throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, LoginMethodErrorMessage.INVALID_LOGIN_METHOD_NAME) + } + + public validateLoginMethodStatus(status: string | undefined) { + if (status && !Object.values(LoginMethodStatus).includes(status as LoginMethodStatus)) + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, LoginMethodErrorMessage.INVALID_LOGIN_METHOD_STATUS) + } + + public async readLoginMethodByOrganizationId(organizationId: string | undefined, queryRunner: QueryRunner) { + if (organizationId) { + const organization = await this.organizationService.readOrganizationById(organizationId, queryRunner) + if (!organization) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, OrganizationErrorMessage.ORGANIZATION_NOT_FOUND) + return await queryRunner.manager.findBy(LoginMethod, { organizationId }) + } else { + return await queryRunner.manager.findBy(LoginMethod, { organizationId: IsNull() }) + } + } + + public async 
encryptLoginMethodConfig(config: string | undefined) { + if (!config) throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, LoginMethodErrorMessage.INVALID_LOGIN_METHOD_STATUS) + return await encrypt(config) + } + + public async decryptLoginMethodConfig(config: string | undefined) { + if (!config) throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, LoginMethodErrorMessage.INVALID_LOGIN_METHOD_STATUS) + return await decrypt(config) + } + + private async saveLoginMethod(data: Partial, queryRunner: QueryRunner) { + return await queryRunner.manager.save(LoginMethod, data) + } + + public async createLoginMethod(data: Partial) { + let queryRunner: QueryRunner | undefined + let newLoginMethod: Partial + try { + queryRunner = this.dataSource.createQueryRunner() + await queryRunner.connect() + const createdBy = await this.userService.readUserById(data.createdBy, queryRunner) + if (!createdBy) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + const organization = await this.organizationService.readOrganizationById(data.organizationId, queryRunner) + if (!organization) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, OrganizationErrorMessage.ORGANIZATION_NOT_FOUND) + this.validateLoginMethodName(data.name) + this.validateLoginMethodStatus(data.status) + data.config = await this.encryptLoginMethodConfig(data.config) + data.updatedBy = createdBy.id + + newLoginMethod = await queryRunner.manager.create(LoginMethod, data) + await queryRunner.startTransaction() + newLoginMethod = await this.saveLoginMethod(newLoginMethod, queryRunner) + await queryRunner.commitTransaction() + } catch (error) { + if (queryRunner && !queryRunner.isTransactionActive) await queryRunner.rollbackTransaction() + throw error + } finally { + if (queryRunner && !queryRunner.isReleased) await queryRunner.release() + } + + return newLoginMethod + } + + public async createOrUpdateConfig(body: any) { + let organizationId: string = body.organizationId + let 
providers: any[] = body.providers + let userId: string = body.userId + + let queryRunner + try { + queryRunner = this.dataSource.createQueryRunner() + await queryRunner.connect() + await queryRunner.startTransaction() + const createdOrUpdatedByUser = await this.userService.readUserById(userId, queryRunner) + if (!createdOrUpdatedByUser) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + const organization = await this.organizationService.readOrganizationById(organizationId, queryRunner) + if (!organization) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, OrganizationErrorMessage.ORGANIZATION_NOT_FOUND) + + for (let provider of providers) { + this.validateLoginMethodName(provider.providerName) + this.validateLoginMethodStatus(provider.status) + + const name = provider.providerName + const loginMethod = await queryRunner.manager.findOneBy(LoginMethod, { organizationId, name }) + if (loginMethod) { + /* empty */ + loginMethod.status = provider.status + loginMethod.config = await this.encryptLoginMethodConfig(JSON.stringify(provider.config)) + loginMethod.updatedBy = userId + await this.saveLoginMethod(loginMethod, queryRunner) + } else { + const encryptedConfig = await this.encryptLoginMethodConfig(JSON.stringify(provider.config)) + let newLoginMethod = queryRunner.manager.create(LoginMethod, { + organizationId, + name, + status: provider.status, + config: encryptedConfig, + createdBy: userId, + updatedBy: userId + }) + await this.saveLoginMethod(newLoginMethod, queryRunner) + } + } + await queryRunner.commitTransaction() + } catch (error) { + if (queryRunner) await queryRunner.rollbackTransaction() + throw error + } finally { + if (queryRunner) await queryRunner.release() + } + return { status: 'OK', organizationId: organizationId } + } + + public async updateLoginMethod(newLoginMethod: Partial) { + const queryRunner = this.dataSource.createQueryRunner() + await queryRunner.connect() + + const oldLoginMethod = await 
this.readLoginMethodById(newLoginMethod.id, queryRunner) + if (!oldLoginMethod) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, LoginMethodErrorMessage.LOGIN_METHOD_NOT_FOUND) + const updatedBy = await this.userService.readUserById(newLoginMethod.updatedBy, queryRunner) + if (!updatedBy) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + if (newLoginMethod.organizationId) { + const organization = await this.organizationService.readOrganizationById(newLoginMethod.organizationId, queryRunner) + if (!organization) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, OrganizationErrorMessage.ORGANIZATION_NOT_FOUND) + } + if (newLoginMethod.name) this.validateLoginMethodName(newLoginMethod.name) + if (newLoginMethod.config) newLoginMethod.config = await this.encryptLoginMethodConfig(newLoginMethod.config) + if (newLoginMethod.status) this.validateLoginMethodStatus(newLoginMethod.status) + newLoginMethod.createdBy = oldLoginMethod.createdBy + + let updateLoginMethod = queryRunner.manager.merge(LoginMethod, newLoginMethod) + try { + await queryRunner.startTransaction() + updateLoginMethod = await this.saveLoginMethod(updateLoginMethod, queryRunner) + await queryRunner.commitTransaction() + } catch (error) { + await queryRunner.rollbackTransaction() + throw error + } finally { + await queryRunner.release() + } + + return updateLoginMethod + } +} diff --git a/packages/server/src/enterprise/services/organization-user.service.ts b/packages/server/src/enterprise/services/organization-user.service.ts new file mode 100644 index 000000000..6e7eee717 --- /dev/null +++ b/packages/server/src/enterprise/services/organization-user.service.ts @@ -0,0 +1,336 @@ +import { StatusCodes } from 'http-status-codes' +import { DataSource, Not, QueryRunner } from 'typeorm' +import { InternalFlowiseError } from '../../errors/internalFlowiseError' +import { GeneralErrorMessage } from '../../utils/constants' +import { getRunningExpressApp } from 
'../../utils/getRunningExpressApp' +import { OrganizationUser, OrganizationUserStatus } from '../database/entities/organization-user.entity' +import { Organization } from '../database/entities/organization.entity' +import { GeneralRole } from '../database/entities/role.entity' +import { WorkspaceUser } from '../database/entities/workspace-user.entity' +import { Workspace } from '../database/entities/workspace.entity' +import { OrganizationErrorMessage, OrganizationService } from './organization.service' +import { RoleErrorMessage, RoleService } from './role.service' +import { UserErrorMessage, UserService } from './user.service' +import { WorkspaceUserErrorMessage } from './workspace-user.service' + +export const enum OrganizationUserErrorMessage { + INVALID_ORGANIZATION_USER_SATUS = 'Invalid Organization User Status', + ORGANIZATION_USER_ALREADY_EXISTS = 'Organization User Already Exists', + ORGANIZATION_USER_NOT_FOUND = 'Organization User Not Found' +} + +export class OrganizationUserService { + private dataSource: DataSource + private userService: UserService + private organizationService: OrganizationService + private roleService: RoleService + + constructor() { + const appServer = getRunningExpressApp() + this.dataSource = appServer.AppDataSource + this.userService = new UserService() + this.organizationService = new OrganizationService() + this.roleService = new RoleService() + } + + public validateOrganizationUserStatus(status: string | undefined) { + if (status && !Object.values(OrganizationUserStatus).includes(status as OrganizationUserStatus)) + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, OrganizationUserErrorMessage.INVALID_ORGANIZATION_USER_SATUS) + } + + public async readOrganizationUserByOrganizationIdUserId( + organizationId: string | undefined, + userId: string | undefined, + queryRunner: QueryRunner + ) { + const organization = await this.organizationService.readOrganizationById(organizationId, queryRunner) + if (!organization) throw 
new InternalFlowiseError(StatusCodes.NOT_FOUND, OrganizationErrorMessage.ORGANIZATION_NOT_FOUND) + const user = await this.userService.readUserById(userId, queryRunner) + if (!user) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + const ownerRole = await this.roleService.readGeneralRoleByName(GeneralRole.OWNER, queryRunner) + + const organizationUser = await queryRunner.manager + .createQueryBuilder(OrganizationUser, 'organizationUser') + .innerJoinAndSelect('organizationUser.role', 'role') + .where('organizationUser.organizationId = :organizationId', { organizationId }) + .andWhere('organizationUser.userId = :userId', { userId }) + .getOne() + + return { + organization, + organizationUser: organizationUser + ? { + ...organizationUser, + isOrgOwner: organizationUser.roleId === ownerRole?.id + } + : null + } + } + + public async readOrganizationUserByWorkspaceIdUserId( + workspaceId: string | undefined, + userId: string | undefined, + queryRunner: QueryRunner + ) { + const workspace = await queryRunner.manager + .createQueryBuilder(WorkspaceUser, 'workspaceUser') + .innerJoinAndSelect('workspaceUser.workspace', 'workspace') + .innerJoinAndSelect('workspaceUser.user', 'user') + .innerJoinAndSelect('workspaceUser.role', 'role') + .where('workspace.id = :workspaceId', { workspaceId }) + .getOne() + if (!workspace) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, WorkspaceUserErrorMessage.WORKSPACE_USER_NOT_FOUND) + return await this.readOrganizationUserByOrganizationIdUserId(workspace.workspace.organizationId, userId, queryRunner) + } + + public async readOrganizationUserByOrganizationId(organizationId: string | undefined, queryRunner: QueryRunner) { + const organization = await this.organizationService.readOrganizationById(organizationId, queryRunner) + if (!organization) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, OrganizationErrorMessage.ORGANIZATION_NOT_FOUND) + const ownerRole = await 
this.roleService.readGeneralRoleByName(GeneralRole.OWNER, queryRunner) + + const organizationUsers = await queryRunner.manager + .createQueryBuilder(OrganizationUser, 'organizationUser') + .innerJoinAndSelect('organizationUser.user', 'user') + .innerJoinAndSelect('organizationUser.role', 'role') + .where('organizationUser.organizationId = :organizationId', { organizationId }) + .getMany() + + // Get workspace user last login for all users + const workspaceUsers = await queryRunner.manager + .createQueryBuilder(WorkspaceUser, 'workspaceUser') + .where('workspaceUser.userId IN (:...userIds)', { + userIds: organizationUsers.map((user) => user.userId) + }) + .orderBy('workspaceUser.lastLogin', 'ASC') + .getMany() + + const lastLoginMap = new Map(workspaceUsers.map((wu) => [wu.userId, wu.lastLogin])) + + return await Promise.all( + organizationUsers.map(async (organizationUser) => { + const workspaceUser = await queryRunner.manager.findBy(WorkspaceUser, { + userId: organizationUser.userId, + workspace: { organizationId: organizationId } + }) + delete organizationUser.user.credential + delete organizationUser.user.tempToken + delete organizationUser.user.tokenExpiry + return { + ...organizationUser, + isOrgOwner: organizationUser.roleId === ownerRole?.id, + lastLogin: lastLoginMap.get(organizationUser.userId) || null, + roleCount: workspaceUser.length + } + }) + ) + } + + public async readOrganizationUserByOrganizationIdRoleId( + organizationId: string | undefined, + roleId: string | undefined, + queryRunner: QueryRunner + ) { + const organization = await this.organizationService.readOrganizationById(organizationId, queryRunner) + if (!organization) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, OrganizationErrorMessage.ORGANIZATION_NOT_FOUND) + const role = await this.roleService.readRoleById(roleId, queryRunner) + if (!role) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, RoleErrorMessage.ROLE_NOT_FOUND) + const ownerRole = await 
this.roleService.readGeneralRoleByName(GeneralRole.OWNER, queryRunner) + + const orgUsers = await queryRunner.manager + .createQueryBuilder(OrganizationUser, 'organizationUser') + .innerJoinAndSelect('organizationUser.role', 'role') + .innerJoinAndSelect('organizationUser.user', 'user') + .where('organizationUser.organizationId = :organizationId', { organizationId }) + .andWhere('organizationUser.roleId = :roleId', { roleId }) + .getMany() + + return orgUsers.map((organizationUser) => { + delete organizationUser.user.credential + delete organizationUser.user.tempToken + delete organizationUser.user.tokenExpiry + return { + ...organizationUser, + isOrgOwner: organizationUser.roleId === ownerRole?.id + } + }) + } + + public async readOrganizationUserByUserId(userId: string | undefined, queryRunner: QueryRunner) { + const user = await this.userService.readUserById(userId, queryRunner) + if (!user) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + const ownerRole = await this.roleService.readGeneralRoleByName(GeneralRole.OWNER, queryRunner) + + const orgUsers = await queryRunner.manager + .createQueryBuilder(OrganizationUser, 'organizationUser') + .innerJoinAndSelect('organizationUser.role', 'role') + .where('organizationUser.userId = :userId', { userId }) + .getMany() + + const organizationUsers = orgUsers.map((user) => ({ + ...user, + isOrgOwner: user.roleId === ownerRole?.id + })) + + // loop through organizationUsers, get the organizationId, find the organization user with the ownerRole.id, and get the user's details + for (const user of organizationUsers) { + const organizationOwner = await this.readOrganizationUserByOrganizationIdRoleId(user.organizationId, ownerRole?.id, queryRunner) + if (organizationOwner.length === 1) { + // get the user's name and email + const userDetails = await this.userService.readUserById(organizationOwner[0].userId, queryRunner) + if (userDetails) { + user.user = userDetails + } + } + } + + return 
organizationUsers + } + + public async readOrgUsersCountByOrgId(organizationId: string): Promise { + try { + const appServer = getRunningExpressApp() + const dbResponse = await appServer.AppDataSource.getRepository(OrganizationUser).countBy({ + organizationId + }) + return dbResponse + } catch (error) { + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, OrganizationUserErrorMessage.ORGANIZATION_USER_NOT_FOUND) + } + } + + public createNewOrganizationUser(data: Partial, queryRunner: QueryRunner) { + if (data.status) this.validateOrganizationUserStatus(data.status) + data.updatedBy = data.createdBy + + return queryRunner.manager.create(OrganizationUser, data) + } + + public async saveOrganizationUser(data: Partial, queryRunner: QueryRunner) { + return await queryRunner.manager.save(OrganizationUser, data) + } + + public async createOrganizationUser(data: Partial) { + const queryRunner = this.dataSource.createQueryRunner() + await queryRunner.connect() + + const { organization, organizationUser } = await this.readOrganizationUserByOrganizationIdUserId( + data.organizationId, + data.userId, + queryRunner + ) + if (organizationUser) + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, OrganizationUserErrorMessage.ORGANIZATION_USER_ALREADY_EXISTS) + const role = await this.roleService.readRoleIsGeneral(data.roleId, queryRunner) + if (!role) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, RoleErrorMessage.ROLE_NOT_FOUND) + const createdBy = await this.userService.readUserById(data.createdBy, queryRunner) + if (!createdBy) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + + let newOrganizationUser = this.createNewOrganizationUser(data, queryRunner) + organization.updatedBy = data.createdBy + try { + await queryRunner.startTransaction() + newOrganizationUser = await this.saveOrganizationUser(newOrganizationUser, queryRunner) + await this.organizationService.saveOrganization(organization, queryRunner) + await 
queryRunner.commitTransaction() + } catch (error) { + await queryRunner.rollbackTransaction() + throw error + } finally { + await queryRunner.release() + } + + return newOrganizationUser + } + + public async createOrganization(data: Partial) { + const queryRunner = this.dataSource.createQueryRunner() + await queryRunner.connect() + + const user = await this.userService.readUserById(data.createdBy, queryRunner) + if (!user) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + + let newOrganization = this.organizationService.createNewOrganization(data, queryRunner) + + const role = await this.roleService.readGeneralRoleByName(GeneralRole.OWNER, queryRunner) + if (!role) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, RoleErrorMessage.ROLE_NOT_FOUND) + let newOrganizationUser: Partial = { + organizationId: newOrganization.id, + userId: user.id, + roleId: role.id, + createdBy: user.id + } + newOrganizationUser = this.createNewOrganizationUser(newOrganizationUser, queryRunner) + try { + await queryRunner.startTransaction() + newOrganization = await this.organizationService.saveOrganization(newOrganization, queryRunner) + await this.saveOrganizationUser(newOrganizationUser, queryRunner) + await queryRunner.commitTransaction() + } catch (error) { + await queryRunner.rollbackTransaction() + throw error + } finally { + await queryRunner.release() + } + + return newOrganization + } + + public async updateOrganizationUser(newOrganizationUser: Partial) { + const queryRunner = this.dataSource.createQueryRunner() + await queryRunner.connect() + + const { organizationUser } = await this.readOrganizationUserByOrganizationIdUserId( + newOrganizationUser.organizationId, + newOrganizationUser.userId, + queryRunner + ) + if (!organizationUser) + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, OrganizationUserErrorMessage.ORGANIZATION_USER_NOT_FOUND) + + if (newOrganizationUser.roleId) { + const role = await 
this.roleService.readRoleIsGeneral(newOrganizationUser.roleId, queryRunner) + if (!role) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, RoleErrorMessage.ROLE_NOT_FOUND) + } + + if (newOrganizationUser.status) this.validateOrganizationUserStatus(newOrganizationUser.status) + + newOrganizationUser.createdBy = organizationUser.createdBy + + let updateOrganizationUser = queryRunner.manager.merge(OrganizationUser, organizationUser, newOrganizationUser) + try { + await queryRunner.startTransaction() + updateOrganizationUser = await this.saveOrganizationUser(updateOrganizationUser, queryRunner) + await queryRunner.commitTransaction() + } catch (error) { + await queryRunner.rollbackTransaction() + throw error + } finally { + await queryRunner.release() + } + + return updateOrganizationUser + } + + public async deleteOrganizationUser(queryRunner: QueryRunner, organizationId: string | undefined, userId: string | undefined) { + const { organizationUser } = await this.readOrganizationUserByOrganizationIdUserId(organizationId, userId, queryRunner) + if (!organizationUser) + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, OrganizationUserErrorMessage.ORGANIZATION_USER_NOT_FOUND) + const role = await this.roleService.readRoleById(organizationUser.roleId, queryRunner) + if (!role) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, RoleErrorMessage.ROLE_NOT_FOUND) + if (role.name === GeneralRole.OWNER) + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, GeneralErrorMessage.NOT_ALLOWED_TO_DELETE_OWNER) + + const rolePersonalWorkspace = await this.roleService.readGeneralRoleByName(GeneralRole.PERSONAL_WORKSPACE, queryRunner) + const organizationWorkspaces = await queryRunner.manager.findBy(Workspace, { organizationId }) + const workspaceUserToDelete = organizationWorkspaces.map((organizationWorkspace) => ({ + workspaceId: organizationWorkspace.id, + userId: organizationUser.userId, + roleId: Not(rolePersonalWorkspace.id) + })) + + await 
queryRunner.manager.delete(OrganizationUser, { organizationId, userId }) + await queryRunner.manager.delete(WorkspaceUser, workspaceUserToDelete) + + return organizationUser + } +} diff --git a/packages/server/src/enterprise/services/organization.service.ts b/packages/server/src/enterprise/services/organization.service.ts new file mode 100644 index 000000000..9ee115467 --- /dev/null +++ b/packages/server/src/enterprise/services/organization.service.ts @@ -0,0 +1,121 @@ +import { StatusCodes } from 'http-status-codes' +import { DataSource, QueryRunner } from 'typeorm' +import { InternalFlowiseError } from '../../errors/internalFlowiseError' +import { generateId } from '../../utils' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' +import { Telemetry } from '../../utils/telemetry' +import { Organization, OrganizationName } from '../database/entities/organization.entity' +import { isInvalidName, isInvalidUUID } from '../utils/validation.util' +import { UserErrorMessage, UserService } from './user.service' + +export const enum OrganizationErrorMessage { + INVALID_ORGANIZATION_ID = 'Invalid Organization Id', + INVALID_ORGANIZATION_NAME = 'Invalid Organization Name', + ORGANIZATION_NOT_FOUND = 'Organization Not Found', + ORGANIZATION_FOUND_MULTIPLE = 'Organization Found Multiple', + ORGANIZATION_RESERVERD_NAME = 'Organization name cannot be Default Organization - this is a reserved name' +} + +export class OrganizationService { + private telemetry: Telemetry + private dataSource: DataSource + private userService: UserService + + constructor() { + const appServer = getRunningExpressApp() + this.dataSource = appServer.AppDataSource + this.telemetry = appServer.telemetry + this.userService = new UserService() + } + + public validateOrganizationId(id: string | undefined) { + if (isInvalidUUID(id)) throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, OrganizationErrorMessage.INVALID_ORGANIZATION_ID) + } + + public async readOrganizationById(id: 
string | undefined, queryRunner: QueryRunner) { + this.validateOrganizationId(id) + return await queryRunner.manager.findOneBy(Organization, { id }) + } + + public validateOrganizationName(name: string | undefined, isRegister: boolean = false) { + if (isInvalidName(name)) throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, OrganizationErrorMessage.INVALID_ORGANIZATION_NAME) + if (!isRegister && name === OrganizationName.DEFAULT_ORGANIZATION) { + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, OrganizationErrorMessage.ORGANIZATION_RESERVERD_NAME) + } + } + + public async readOrganizationByName(name: string | undefined, queryRunner: QueryRunner) { + this.validateOrganizationName(name) + return await queryRunner.manager.findOneBy(Organization, { name }) + } + + public async countOrganizations(queryRunner: QueryRunner) { + return await queryRunner.manager.count(Organization) + } + + public async readOrganization(queryRunner: QueryRunner) { + return await queryRunner.manager.find(Organization) + } + + public createNewOrganization(data: Partial, queryRunner: QueryRunner, isRegister: boolean = false) { + this.validateOrganizationName(data.name, isRegister) + data.updatedBy = data.createdBy + data.id = generateId() + + return queryRunner.manager.create(Organization, data) + } + + public async saveOrganization(data: Partial, queryRunner: QueryRunner) { + return await queryRunner.manager.save(Organization, data) + } + + public async createOrganization(data: Partial) { + const queryRunner = this.dataSource.createQueryRunner() + await queryRunner.connect() + + const user = await this.userService.readUserById(data.createdBy, queryRunner) + if (!user) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + + let newOrganization = this.createNewOrganization(data, queryRunner) + try { + await queryRunner.startTransaction() + newOrganization = await this.saveOrganization(newOrganization, queryRunner) + await 
queryRunner.commitTransaction() + } catch (error) { + await queryRunner.rollbackTransaction() + throw error + } finally { + await queryRunner.release() + } + + return newOrganization + } + + public async updateOrganization(newOrganizationData: Partial) { + const queryRunner = this.dataSource.createQueryRunner() + await queryRunner.connect() + + const oldOrganizationData = await this.readOrganizationById(newOrganizationData.id, queryRunner) + if (!oldOrganizationData) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, OrganizationErrorMessage.ORGANIZATION_NOT_FOUND) + const user = await this.userService.readUserById(newOrganizationData.updatedBy, queryRunner) + if (!user) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + if (newOrganizationData.name) { + this.validateOrganizationName(newOrganizationData.name) + } + newOrganizationData.createdBy = oldOrganizationData.createdBy + + let updateOrganization = queryRunner.manager.merge(Organization, oldOrganizationData, newOrganizationData) + try { + await queryRunner.startTransaction() + await this.saveOrganization(updateOrganization, queryRunner) + await queryRunner.commitTransaction() + } catch (error) { + await queryRunner.rollbackTransaction() + throw error + } finally { + await queryRunner.release() + } + + return updateOrganization + } +} diff --git a/packages/server/src/enterprise/services/role.service.ts b/packages/server/src/enterprise/services/role.service.ts new file mode 100644 index 000000000..e2fed1b81 --- /dev/null +++ b/packages/server/src/enterprise/services/role.service.ts @@ -0,0 +1,169 @@ +import { StatusCodes } from 'http-status-codes' +import { DataSource, IsNull, QueryRunner } from 'typeorm' +import { InternalFlowiseError } from '../../errors/internalFlowiseError' +import { GeneralSuccessMessage } from '../../utils/constants' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' +import { Role } from '../database/entities/role.entity' 
+import { WorkspaceUser } from '../database/entities/workspace-user.entity' +import { isInvalidName, isInvalidUUID } from '../utils/validation.util' +import { OrganizationErrorMessage, OrganizationService } from './organization.service' +import { UserErrorMessage, UserService } from './user.service' + +export const enum RoleErrorMessage { + INVALID_ROLE_ID = 'Invalid Role Id', + INVALID_ROLE_NAME = 'Invalid Role Name', + INVALID_ROLE_PERMISSIONS = 'Invalid Role Permissions', + ROLE_NOT_FOUND = 'Role Not Found' +} + +export class RoleService { + private dataSource: DataSource + private userService: UserService + private organizationService: OrganizationService + + constructor() { + const appServer = getRunningExpressApp() + this.dataSource = appServer.AppDataSource + this.userService = new UserService() + this.organizationService = new OrganizationService() + } + + public validateRoleId(id: string | undefined) { + if (isInvalidUUID(id)) throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, RoleErrorMessage.INVALID_ROLE_ID) + } + + public async readRoleById(id: string | undefined, queryRunner: QueryRunner) { + this.validateRoleId(id) + return await queryRunner.manager.findOneBy(Role, { id }) + } + + public validateRoleName(name: string | undefined) { + if (isInvalidName(name)) throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, RoleErrorMessage.INVALID_ROLE_NAME) + } + + public async readRoleByOrganizationId(organizationId: string | undefined, queryRunner: QueryRunner) { + const organization = await this.organizationService.readOrganizationById(organizationId, queryRunner) + if (!organization) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, OrganizationErrorMessage.ORGANIZATION_NOT_FOUND) + + const roles = await queryRunner.manager.findBy(Role, { organizationId }) + return await Promise.all( + roles.map(async (role) => { + const workspaceUser = await queryRunner.manager.findBy(WorkspaceUser, { roleId: role.id }) + const userCount = workspaceUser.length + 
return { ...role, userCount } as Role & { userCount: number } + }) + ) + } + + public async readRoleByRoleIdOrganizationId(id: string | undefined, organizationId: string | undefined, queryRunner: QueryRunner) { + this.validateRoleId(id) + const organization = await this.organizationService.readOrganizationById(organizationId, queryRunner) + if (!organization) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, OrganizationErrorMessage.ORGANIZATION_NOT_FOUND) + + return await queryRunner.manager.findOneBy(Role, { id, organizationId }) + } + + public async readGeneralRoleByName(name: string | undefined, queryRunner: QueryRunner) { + this.validateRoleName(name) + const generalRole = await queryRunner.manager.findOneBy(Role, { name, organizationId: IsNull() }) + if (!generalRole) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, RoleErrorMessage.ROLE_NOT_FOUND) + return generalRole + } + + public async readRoleIsGeneral(id: string | undefined, queryRunner: QueryRunner) { + this.validateRoleId(id) + return await queryRunner.manager.findOneBy(Role, { id, organizationId: IsNull() }) + } + + public async readRoleByGeneral(queryRunner: QueryRunner) { + const generalRoles = await queryRunner.manager.find(Role, { where: { organizationId: IsNull() } }) + if (generalRoles.length <= 0) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, RoleErrorMessage.ROLE_NOT_FOUND) + return generalRoles + } + + public async readRole(queryRunner: QueryRunner) { + return await queryRunner.manager.find(Role) + } + + public async saveRole(data: Partial, queryRunner: QueryRunner) { + return await queryRunner.manager.save(Role, data) + } + + public async createRole(data: Partial) { + const queryRunner = this.dataSource.createQueryRunner() + await queryRunner.connect() + + const user = await this.userService.readUserById(data.createdBy, queryRunner) + if (!user) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + const organization = await 
this.organizationService.readOrganizationById(data.organizationId, queryRunner) + if (!organization) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, OrganizationErrorMessage.ORGANIZATION_NOT_FOUND) + this.validateRoleName(data.name) + if (!data.permissions) throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, RoleErrorMessage.INVALID_ROLE_PERMISSIONS) + data.updatedBy = data.createdBy + + let newRole = queryRunner.manager.create(Role, data) + try { + await queryRunner.startTransaction() + newRole = await this.saveRole(newRole, queryRunner) + await queryRunner.commitTransaction() + } catch (error) { + await queryRunner.rollbackTransaction() + throw error + } finally { + await queryRunner.release() + } + + return newRole + } + + public async updateRole(newRole: Partial) { + const queryRunner = this.dataSource.createQueryRunner() + await queryRunner.connect() + + const oldRole = await this.readRoleById(newRole.id, queryRunner) + if (!oldRole) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, RoleErrorMessage.ROLE_NOT_FOUND) + const user = await this.userService.readUserById(newRole.updatedBy, queryRunner) + if (!user) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + if (newRole.name) this.validateRoleName(newRole.name) + newRole.organizationId = oldRole.organizationId + newRole.createdBy = oldRole.createdBy + + let updateRole = queryRunner.manager.merge(Role, oldRole, newRole) + try { + await queryRunner.startTransaction() + updateRole = await this.saveRole(updateRole, queryRunner) + await queryRunner.commitTransaction() + } catch (error) { + await queryRunner.rollbackTransaction() + throw error + } finally { + await queryRunner.release() + } + + return updateRole + } + + public async deleteRole(organizationId: string | undefined, roleId: string | undefined) { + const queryRunner = this.dataSource.createQueryRunner() + try { + await queryRunner.connect() + + const role = await 
this.readRoleByRoleIdOrganizationId(roleId, organizationId, queryRunner) + if (!role) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, RoleErrorMessage.ROLE_NOT_FOUND) + + await queryRunner.startTransaction() + + await queryRunner.manager.delete(WorkspaceUser, { roleId }) + await queryRunner.manager.delete(Role, { id: roleId }) + + await queryRunner.commitTransaction() + + return { message: GeneralSuccessMessage.DELETED } + } catch (error) { + if (queryRunner.isTransactionActive) await queryRunner.rollbackTransaction() + throw error + } finally { + if (!queryRunner.isReleased) await queryRunner.release() + } + } +} diff --git a/packages/server/src/enterprise/services/user.service.ts b/packages/server/src/enterprise/services/user.service.ts new file mode 100644 index 000000000..991f6644c --- /dev/null +++ b/packages/server/src/enterprise/services/user.service.ts @@ -0,0 +1,197 @@ +import { StatusCodes } from 'http-status-codes' +import { InternalFlowiseError } from '../../errors/internalFlowiseError' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' +import { Telemetry, TelemetryEventType } from '../../utils/telemetry' +import { User, UserStatus } from '../database/entities/user.entity' +import { isInvalidEmail, isInvalidName, isInvalidPassword, isInvalidUUID } from '../utils/validation.util' +import { DataSource, ILike, QueryRunner } from 'typeorm' +import { generateId } from '../../utils' +import { GeneralErrorMessage } from '../../utils/constants' +import { compareHash, getHash } from '../utils/encryption.util' +import { sanitizeUser } from '../../utils/sanitize.util' +import { destroyAllSessionsForUser } from '../middleware/passport/SessionPersistance' + +export const enum UserErrorMessage { + EXPIRED_TEMP_TOKEN = 'Expired Temporary Token', + INVALID_TEMP_TOKEN = 'Invalid Temporary Token', + INVALID_USER_ID = 'Invalid User Id', + INVALID_USER_EMAIL = 'Invalid User Email', + INVALID_USER_CREDENTIAL = 'Invalid User Credential', + 
INVALID_USER_NAME = 'Invalid User Name', + INVALID_USER_TYPE = 'Invalid User Type', + INVALID_USER_STATUS = 'Invalid User Status', + USER_EMAIL_ALREADY_EXISTS = 'User Email Already Exists', + USER_EMAIL_UNVERIFIED = 'User Email Unverified', + USER_NOT_FOUND = 'User Not Found', + USER_FOUND_MULTIPLE = 'User Found Multiple', + INCORRECT_USER_EMAIL_OR_CREDENTIALS = 'Incorrect Email or Password', + PASSWORDS_DO_NOT_MATCH = 'Passwords do not match' +} +export class UserService { + private telemetry: Telemetry + private dataSource: DataSource + + constructor() { + const appServer = getRunningExpressApp() + this.dataSource = appServer.AppDataSource + this.telemetry = appServer.telemetry + } + + public validateUserId(id: string | undefined) { + if (isInvalidUUID(id)) throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, UserErrorMessage.INVALID_USER_ID) + } + + public async readUserById(id: string | undefined, queryRunner: QueryRunner) { + this.validateUserId(id) + return await queryRunner.manager.findOneBy(User, { id }) + } + + public validateUserName(name: string | undefined) { + if (isInvalidName(name)) throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, UserErrorMessage.INVALID_USER_NAME) + } + + public validateUserEmail(email: string | undefined) { + if (isInvalidEmail(email)) throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, UserErrorMessage.INVALID_USER_EMAIL) + } + + public async readUserByEmail(email: string | undefined, queryRunner: QueryRunner) { + if (!email) throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, UserErrorMessage.INVALID_USER_EMAIL) + this.validateUserEmail(email) + return await queryRunner.manager.findOneBy(User, { email: ILike(email) }) + } + + public async readUserByToken(token: string | undefined, queryRunner: QueryRunner) { + return await queryRunner.manager.findOneBy(User, { tempToken: token }) + } + + public validateUserStatus(status: string | undefined) { + if (status && !Object.values(UserStatus).includes(status as 
UserStatus)) + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, UserErrorMessage.INVALID_USER_STATUS) + } + + public async readUser(queryRunner: QueryRunner) { + return await queryRunner.manager.find(User) + } + + public encryptUserCredential(credential: string | undefined) { + if (!credential || isInvalidPassword(credential)) + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, GeneralErrorMessage.INVALID_PASSWORD) + return getHash(credential) + } + + public async createNewUser(data: Partial, queryRunner: QueryRunner) { + const user = await this.readUserByEmail(data.email, queryRunner) + if (user) throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, UserErrorMessage.USER_EMAIL_ALREADY_EXISTS) + if (data.credential) data.credential = this.encryptUserCredential(data.credential) + if (!data.name) data.name = data.email + this.validateUserName(data.name) + if (data.status) this.validateUserStatus(data.status) + + data.id = generateId() + if (data.createdBy) { + const createdBy = await this.readUserById(data.createdBy, queryRunner) + if (!createdBy) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + data.createdBy = createdBy.id + data.updatedBy = data.createdBy + } else { + data.createdBy = data.id + data.updatedBy = data.id + } + + const userObj = queryRunner.manager.create(User, data) + + this.telemetry.sendTelemetry( + TelemetryEventType.USER_CREATED, + { + userId: userObj.id, + createdBy: userObj.createdBy + }, + userObj.id + ) + + return userObj + } + + public async saveUser(data: Partial, queryRunner: QueryRunner) { + return await queryRunner.manager.save(User, data) + } + + public async createUser(data: Partial) { + const queryRunner = this.dataSource.createQueryRunner() + await queryRunner.connect() + + let newUser = await this.createNewUser(data, queryRunner) + try { + await queryRunner.startTransaction() + newUser = await this.saveUser(newUser, queryRunner) + await queryRunner.commitTransaction() + } catch 
(error) { + await queryRunner.rollbackTransaction() + throw error + } finally { + await queryRunner.release() + } + + return newUser + } + + public async updateUser(newUserData: Partial & { oldPassword?: string; newPassword?: string; confirmPassword?: string }) { + let queryRunner: QueryRunner | undefined + let updatedUser: Partial + try { + queryRunner = this.dataSource.createQueryRunner() + await queryRunner.connect() + const oldUserData = await this.readUserById(newUserData.id, queryRunner) + if (!oldUserData) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + + if (newUserData.updatedBy) { + const updateUserData = await this.readUserById(newUserData.updatedBy, queryRunner) + if (!updateUserData) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + } + + newUserData.createdBy = oldUserData.createdBy + + if (newUserData.name) { + this.validateUserName(newUserData.name) + } + + if (newUserData.status) { + this.validateUserStatus(newUserData.status) + } + + if (newUserData.oldPassword && newUserData.newPassword && newUserData.confirmPassword) { + if (!oldUserData.credential) { + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, UserErrorMessage.INVALID_USER_CREDENTIAL) + } + // verify old password + if (!compareHash(newUserData.oldPassword, oldUserData.credential)) { + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, UserErrorMessage.INVALID_USER_CREDENTIAL) + } + if (newUserData.newPassword !== newUserData.confirmPassword) { + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, UserErrorMessage.PASSWORDS_DO_NOT_MATCH) + } + const hash = getHash(newUserData.newPassword) + newUserData.credential = hash + newUserData.tempToken = '' + newUserData.tokenExpiry = undefined + } + + updatedUser = queryRunner.manager.merge(User, oldUserData, newUserData) + await queryRunner.startTransaction() + await this.saveUser(updatedUser, queryRunner) + await queryRunner.commitTransaction() + + 
// Invalidate all sessions for this user if password was changed + if (newUserData.oldPassword && newUserData.newPassword && newUserData.confirmPassword) { + await destroyAllSessionsForUser(updatedUser.id as string) + } + } catch (error) { + if (queryRunner && queryRunner.isTransactionActive) await queryRunner.rollbackTransaction() + throw error + } finally { + if (queryRunner && !queryRunner.isReleased) await queryRunner.release() + } + + return sanitizeUser(updatedUser) + } +} diff --git a/packages/server/src/enterprise/services/workspace-user.service.ts b/packages/server/src/enterprise/services/workspace-user.service.ts new file mode 100644 index 000000000..2b4f17726 --- /dev/null +++ b/packages/server/src/enterprise/services/workspace-user.service.ts @@ -0,0 +1,386 @@ +import { StatusCodes } from 'http-status-codes' +import { DataSource, QueryRunner } from 'typeorm' +import { InternalFlowiseError } from '../../errors/internalFlowiseError' +import { GeneralErrorMessage, GeneralSuccessMessage } from '../../utils/constants' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' +import { OrganizationUser } from '../database/entities/organization-user.entity' +import { GeneralRole } from '../database/entities/role.entity' +import { WorkspaceUser, WorkspaceUserStatus } from '../database/entities/workspace-user.entity' +import { Workspace } from '../database/entities/workspace.entity' +import { isInvalidDateTime } from '../utils/validation.util' +import { OrganizationUserErrorMessage } from './organization-user.service' +import { OrganizationErrorMessage, OrganizationService } from './organization.service' +import { RoleErrorMessage, RoleService } from './role.service' +import { UserErrorMessage, UserService } from './user.service' +import { WorkspaceErrorMessage, WorkspaceService } from './workspace.service' + +export const enum WorkspaceUserErrorMessage { + INVALID_WORKSPACE_USER_SATUS = 'Invalid Workspace User Status', + 
INVALID_WORKSPACE_USER_LASTLOGIN = 'Invalid Workspace User LastLogin', + WORKSPACE_USER_ALREADY_EXISTS = 'Workspace User Already Exists', + WORKSPACE_USER_NOT_FOUND = 'Workspace User Not Found' +} + +export class WorkspaceUserService { + private dataSource: DataSource + private userService: UserService + private workspaceService: WorkspaceService + private roleService: RoleService + private organizationService: OrganizationService + + constructor() { + const appServer = getRunningExpressApp() + this.dataSource = appServer.AppDataSource + this.userService = new UserService() + this.workspaceService = new WorkspaceService() + this.roleService = new RoleService() + this.organizationService = new OrganizationService() + } + + public validateWorkspaceUserStatus(status: string | undefined) { + if (status && !Object.values(WorkspaceUserStatus).includes(status as WorkspaceUserStatus)) + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, WorkspaceUserErrorMessage.INVALID_WORKSPACE_USER_SATUS) + } + + public validateWorkspaceUserLastLogin(lastLogin: string | undefined) { + if (isInvalidDateTime(lastLogin)) + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, WorkspaceUserErrorMessage.INVALID_WORKSPACE_USER_LASTLOGIN) + } + + public async readWorkspaceUserByWorkspaceIdUserId( + workspaceId: string | undefined, + userId: string | undefined, + queryRunner: QueryRunner + ) { + const workspace = await this.workspaceService.readWorkspaceById(workspaceId, queryRunner) + if (!workspace) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, WorkspaceErrorMessage.WORKSPACE_NOT_FOUND) + const user = await this.userService.readUserById(userId, queryRunner) + if (!user) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + const ownerRole = await this.roleService.readGeneralRoleByName(GeneralRole.OWNER, queryRunner) + + const workspaceUser = await queryRunner.manager + .createQueryBuilder(WorkspaceUser, 'workspaceUser') + 
.innerJoinAndSelect('workspaceUser.role', 'role') + .where('workspaceUser.workspaceId = :workspaceId', { workspaceId }) + .andWhere('workspaceUser.userId = :userId', { userId }) + .getOne() + + return { + workspace, + workspaceUser: workspaceUser + ? { + ...workspaceUser, + isOrgOwner: workspaceUser.roleId === ownerRole?.id + } + : null + } + } + + public async readWorkspaceUserByWorkspaceId(workspaceId: string | undefined, queryRunner: QueryRunner) { + const workspace = await this.workspaceService.readWorkspaceById(workspaceId, queryRunner) + if (!workspace) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, WorkspaceErrorMessage.WORKSPACE_NOT_FOUND) + const ownerRole = await this.roleService.readGeneralRoleByName(GeneralRole.OWNER, queryRunner) + + const workspaceUsers = await queryRunner.manager + .createQueryBuilder(WorkspaceUser, 'workspaceUser') + .innerJoinAndSelect('workspaceUser.role', 'role') + .innerJoinAndSelect('workspaceUser.user', 'user') + .where('workspaceUser.workspaceId = :workspaceId', { workspaceId }) + .getMany() + + return workspaceUsers.map((workspaceUser) => { + delete workspaceUser.user.credential + delete workspaceUser.user.tempToken + delete workspaceUser.user.tokenExpiry + return { + ...workspaceUser, + isOrgOwner: workspaceUser.roleId === ownerRole?.id + } + }) + } + + public async readWorkspaceUserByUserId(userId: string | undefined, queryRunner: QueryRunner) { + const user = await this.userService.readUserById(userId, queryRunner) + if (!user) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + const ownerRole = await this.roleService.readGeneralRoleByName(GeneralRole.OWNER, queryRunner) + + const workspaceUsers = await queryRunner.manager + .createQueryBuilder(WorkspaceUser, 'workspaceUser') + .innerJoinAndSelect('workspaceUser.workspace', 'workspace') + .innerJoinAndSelect('workspaceUser.role', 'role') + .where('workspaceUser.userId = :userId', { userId }) + .getMany() + + return 
workspaceUsers.map((user) => ({ + ...user, + isOrgOwner: user.roleId === ownerRole?.id + })) + } + + public async readWorkspaceUserByOrganizationIdUserId( + organizationId: string | undefined, + userId: string | undefined, + queryRunner: QueryRunner + ) { + const user = await this.userService.readUserById(userId, queryRunner) + if (!user) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + const organization = await this.organizationService.readOrganizationById(organizationId, queryRunner) + if (!organization) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, OrganizationErrorMessage.ORGANIZATION_NOT_FOUND) + const ownerRole = await this.roleService.readGeneralRoleByName(GeneralRole.OWNER, queryRunner) + + const workspaceUsers = await queryRunner.manager + .createQueryBuilder(WorkspaceUser, 'workspaceUser') + .innerJoinAndSelect('workspaceUser.workspace', 'workspace') + .innerJoinAndSelect('workspaceUser.role', 'role') + .where('workspace.organizationId = :organizationId', { organizationId }) + .andWhere('workspaceUser.userId = :userId', { userId }) + .getMany() + + return workspaceUsers.map((user) => ({ + ...user, + isOrgOwner: user.roleId === ownerRole?.id + })) + } + + public async readWorkspaceUserByOrganizationId(organizationId: string | undefined, queryRunner: QueryRunner) { + const organization = await this.organizationService.readOrganizationById(organizationId, queryRunner) + if (!organization) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, OrganizationErrorMessage.ORGANIZATION_NOT_FOUND) + const ownerRole = await this.roleService.readGeneralRoleByName(GeneralRole.OWNER, queryRunner) + + const workspaceUsers = await queryRunner.manager + .createQueryBuilder(WorkspaceUser, 'workspaceUser') + .innerJoinAndSelect('workspaceUser.workspace', 'workspace') + .innerJoinAndSelect('workspaceUser.user', 'user') + .innerJoinAndSelect('workspaceUser.role', 'role') + .where('workspace.organizationId = :organizationId', { 
organizationId }) + .getMany() + + return workspaceUsers.map((user) => ({ + ...user, + isOrgOwner: user.roleId === ownerRole?.id + })) + } + + public async readWorkspaceUserByRoleId(roleId: string | undefined, queryRunner: QueryRunner) { + const role = await this.roleService.readRoleById(roleId, queryRunner) + if (!role) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, RoleErrorMessage.ROLE_NOT_FOUND) + const ownerRole = await this.roleService.readGeneralRoleByName(GeneralRole.OWNER, queryRunner) + + const workspaceUsers = await queryRunner.manager + .createQueryBuilder(WorkspaceUser, 'workspaceUser') + .innerJoinAndSelect('workspaceUser.workspace', 'workspace') + .innerJoinAndSelect('workspaceUser.user', 'user') + .innerJoinAndSelect('workspaceUser.role', 'role') + .where('workspaceUser.roleId = :roleId', { roleId }) + .getMany() + + return workspaceUsers.map((workspaceUser) => { + delete workspaceUser.user.credential + delete workspaceUser.user.tempToken + delete workspaceUser.user.tokenExpiry + return { + ...workspaceUser, + isOrgOwner: workspaceUser.roleId === ownerRole?.id + } + }) + } + + public async readWorkspaceUserByLastLogin(userId: string | undefined, queryRunner: QueryRunner) { + const user = await this.userService.readUserById(userId, queryRunner) + if (!user) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + const ownerRole = await this.roleService.readGeneralRoleByName(GeneralRole.OWNER, queryRunner) + + let workspaceUser = await queryRunner.manager + .createQueryBuilder(WorkspaceUser, 'workspaceUser') + .innerJoinAndSelect('workspaceUser.workspace', 'workspace') + .innerJoinAndSelect('workspaceUser.role', 'role') + .where('workspaceUser.userId = :userId', { userId }) + .andWhere('workspaceUser.lastLogin IS NOT NULL') + .orderBy('workspaceUser.lastLogin', 'DESC') + .take(1) + .getOne() + + if (!workspaceUser) return await this.readWorkspaceUserByUserId(userId, queryRunner) + + return { + ...workspaceUser, 
+ isOrgOwner: workspaceUser.roleId === ownerRole?.id + } + } + + public createNewWorkspaceUser(data: Partial, queryRunner: QueryRunner) { + if (data.status) this.validateWorkspaceUserStatus(data.status) + data.updatedBy = data.createdBy + + return queryRunner.manager.create(WorkspaceUser, data) + } + + public async saveWorkspaceUser(data: Partial, queryRunner: QueryRunner) { + return await queryRunner.manager.save(WorkspaceUser, data) + } + + public async createWorkspaceUser(data: Partial) { + const queryRunner = this.dataSource.createQueryRunner() + await queryRunner.connect() + + const { workspace, workspaceUser } = await this.readWorkspaceUserByWorkspaceIdUserId(data.workspaceId, data.userId, queryRunner) + if (workspaceUser) throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, WorkspaceUserErrorMessage.WORKSPACE_USER_ALREADY_EXISTS) + const role = await this.roleService.readRoleById(data.roleId, queryRunner) + if (!role) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, RoleErrorMessage.ROLE_NOT_FOUND) + const createdBy = await this.userService.readUserById(data.createdBy, queryRunner) + if (!createdBy) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + + let newWorkspaceUser = this.createNewWorkspaceUser(data, queryRunner) + workspace.updatedBy = data.createdBy + try { + await queryRunner.startTransaction() + newWorkspaceUser = await this.saveWorkspaceUser(newWorkspaceUser, queryRunner) + await this.workspaceService.saveWorkspace(workspace, queryRunner) + await this.roleService.saveRole(role, queryRunner) + await queryRunner.commitTransaction() + } catch (error) { + await queryRunner.rollbackTransaction() + throw error + } finally { + await queryRunner.release() + } + + return newWorkspaceUser + } + + public async createWorkspace(data: Partial) { + const queryRunner = this.dataSource.createQueryRunner() + await queryRunner.connect() + + const organization = await 
this.organizationService.readOrganizationById(data.organizationId, queryRunner) + if (!organization) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, OrganizationErrorMessage.ORGANIZATION_NOT_FOUND) + + const user = await this.userService.readUserById(data.createdBy, queryRunner) + if (!user) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + + let organizationUser = await queryRunner.manager.findOneBy(OrganizationUser, { organizationId: organization.id, userId: user.id }) + if (!organizationUser) + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, OrganizationUserErrorMessage.ORGANIZATION_USER_NOT_FOUND) + organizationUser.updatedBy = user.id + + let newWorkspace = this.workspaceService.createNewWorkspace(data, queryRunner) + + const ownerRole = await this.roleService.readGeneralRoleByName(GeneralRole.OWNER, queryRunner) + if (!ownerRole) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, RoleErrorMessage.ROLE_NOT_FOUND) + + const role = await this.roleService.readRoleById(organizationUser.roleId, queryRunner) + if (!role) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, RoleErrorMessage.ROLE_NOT_FOUND) + + // Add org admin as workspace owner if the user creating the workspace is NOT the org admin + const orgAdmin = await queryRunner.manager.findOneBy(OrganizationUser, { + organizationId: organization.id, + roleId: ownerRole.id + }) + if (!orgAdmin) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, OrganizationUserErrorMessage.ORGANIZATION_USER_NOT_FOUND) + + let isCreateWorkSpaceUserOrgAdmin = false + if (orgAdmin.userId === user.id) { + isCreateWorkSpaceUserOrgAdmin = true + } + + let orgAdminUser: Partial = { + workspaceId: newWorkspace.id, + roleId: ownerRole.id, + userId: orgAdmin.userId, + createdBy: orgAdmin.userId + } + if (!isCreateWorkSpaceUserOrgAdmin) orgAdminUser = this.createNewWorkspaceUser(orgAdminUser, queryRunner) + + let newWorkspaceUser: Partial = { + workspaceId: newWorkspace.id, + 
roleId: role.id, + userId: user.id, + createdBy: user.id + } + // If user creating the workspace is an invited user, not the organization admin, inherit the role from existingWorkspaceId + if ((data as any).existingWorkspaceId) { + const existingWorkspaceUser = await queryRunner.manager.findOneBy(WorkspaceUser, { + workspaceId: (data as any).existingWorkspaceId, + userId: user.id + }) + if (existingWorkspaceUser) { + newWorkspaceUser.roleId = existingWorkspaceUser.roleId + } + } + + newWorkspaceUser = this.createNewWorkspaceUser(newWorkspaceUser, queryRunner) + + try { + await queryRunner.startTransaction() + newWorkspace = await this.workspaceService.saveWorkspace(newWorkspace, queryRunner) + if (!isCreateWorkSpaceUserOrgAdmin) await this.saveWorkspaceUser(orgAdminUser, queryRunner) + await this.saveWorkspaceUser(newWorkspaceUser, queryRunner) + await queryRunner.manager.save(OrganizationUser, organizationUser) + await queryRunner.commitTransaction() + } catch (error) { + await queryRunner.rollbackTransaction() + throw error + } finally { + await queryRunner.release() + } + + return newWorkspace + } + + public async updateWorkspaceUser(newWorkspaserUser: Partial, queryRunner: QueryRunner) { + const { workspaceUser } = await this.readWorkspaceUserByWorkspaceIdUserId( + newWorkspaserUser.workspaceId, + newWorkspaserUser.userId, + queryRunner + ) + if (!workspaceUser) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, WorkspaceUserErrorMessage.WORKSPACE_USER_NOT_FOUND) + if (newWorkspaserUser.roleId && workspaceUser.role) { + const role = await this.roleService.readRoleById(newWorkspaserUser.roleId, queryRunner) + if (!role) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, RoleErrorMessage.ROLE_NOT_FOUND) + // check if the role is from the same organization + if (role.organizationId !== workspaceUser.role.organizationId) { + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, RoleErrorMessage.ROLE_NOT_FOUND) + } + // delete role, the new role will be 
created again, with the new roleId (newWorkspaserUser.roleId) + if (workspaceUser.role) delete workspaceUser.role + } + const updatedBy = await this.userService.readUserById(newWorkspaserUser.updatedBy, queryRunner) + if (!updatedBy) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + if (newWorkspaserUser.status) this.validateWorkspaceUserStatus(newWorkspaserUser.status) + if (newWorkspaserUser.lastLogin) this.validateWorkspaceUserLastLogin(newWorkspaserUser.lastLogin) + newWorkspaserUser.createdBy = workspaceUser.createdBy + + let updataWorkspaceUser = queryRunner.manager.merge(WorkspaceUser, workspaceUser, newWorkspaserUser) + updataWorkspaceUser = await this.saveWorkspaceUser(updataWorkspaceUser, queryRunner) + + return updataWorkspaceUser + } + + public async deleteWorkspaceUser(workspaceId: string | undefined, userId: string | undefined) { + const queryRunner = this.dataSource.createQueryRunner() + try { + await queryRunner.connect() + const { workspace, workspaceUser } = await this.readWorkspaceUserByWorkspaceIdUserId(workspaceId, userId, queryRunner) + if (!workspaceUser) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, WorkspaceUserErrorMessage.WORKSPACE_USER_NOT_FOUND) + const role = await this.roleService.readRoleById(workspaceUser.roleId, queryRunner) + if (!role) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, RoleErrorMessage.ROLE_NOT_FOUND) + if (role.name === GeneralRole.OWNER) + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, GeneralErrorMessage.NOT_ALLOWED_TO_DELETE_OWNER) + + await queryRunner.startTransaction() + + await queryRunner.manager.delete(WorkspaceUser, { workspaceId, userId }) + await this.roleService.saveRole(role, queryRunner) + await this.workspaceService.saveWorkspace(workspace, queryRunner) + + await queryRunner.commitTransaction() + + return { message: GeneralSuccessMessage.DELETED } + } catch (error) { + if (queryRunner.isTransactionActive) await 
queryRunner.rollbackTransaction() + throw error + } finally { + if (!queryRunner.isReleased) await queryRunner.release() + } + } +} diff --git a/packages/server/src/enterprise/services/workspace.service.ts b/packages/server/src/enterprise/services/workspace.service.ts new file mode 100644 index 000000000..fb8ec468e --- /dev/null +++ b/packages/server/src/enterprise/services/workspace.service.ts @@ -0,0 +1,327 @@ +import { StatusCodes } from 'http-status-codes' +import { DataSource, EntityManager, In, IsNull, QueryRunner, UpdateResult } from 'typeorm' +import { ApiKey } from '../../database/entities/ApiKey' +import { Assistant } from '../../database/entities/Assistant' +import { ChatFlow } from '../../database/entities/ChatFlow' +import { ChatMessage } from '../../database/entities/ChatMessage' +import { ChatMessageFeedback } from '../../database/entities/ChatMessageFeedback' +import { Credential } from '../../database/entities/Credential' +import { CustomTemplate } from '../../database/entities/CustomTemplate' +import { Dataset } from '../../database/entities/Dataset' +import { DatasetRow } from '../../database/entities/DatasetRow' +import { DocumentStore } from '../../database/entities/DocumentStore' +import { DocumentStoreFileChunk } from '../../database/entities/DocumentStoreFileChunk' +import { Evaluation } from '../../database/entities/Evaluation' +import { EvaluationRun } from '../../database/entities/EvaluationRun' +import { Evaluator } from '../../database/entities/Evaluator' +import { Execution } from '../../database/entities/Execution' +import { Tool } from '../../database/entities/Tool' +import { UpsertHistory } from '../../database/entities/UpsertHistory' +import { Variable } from '../../database/entities/Variable' +import { InternalFlowiseError } from '../../errors/internalFlowiseError' +import { generateId } from '../../utils' +import { GeneralSuccessMessage } from '../../utils/constants' +import { getRunningExpressApp } from 
'../../utils/getRunningExpressApp' +import { WorkspaceShared } from '../database/entities/EnterpriseEntities' +import { GeneralRole } from '../database/entities/role.entity' +import { WorkspaceUser } from '../database/entities/workspace-user.entity' +import { Workspace, WorkspaceName } from '../database/entities/workspace.entity' +import { isInvalidName, isInvalidUUID } from '../utils/validation.util' +import { OrganizationErrorMessage, OrganizationService } from './organization.service' +import { RoleErrorMessage, RoleService } from './role.service' +import { UserErrorMessage, UserService } from './user.service' + +export const enum WorkspaceErrorMessage { + INVALID_WORKSPACE_ID = 'Invalid Workspace Id', + INVALID_WORKSPACE_NAME = 'Invalid Workspace Name', + WORKSPACE_NOT_FOUND = 'Workspace Not Found', + WORKSPACE_RESERVERD_NAME = 'Workspace name cannot be Default Workspace or Personal Workspace - this is a reserved name' +} + +export class WorkspaceService { + private dataSource: DataSource + private userService: UserService + private organizationService: OrganizationService + private roleService: RoleService + + constructor() { + const appServer = getRunningExpressApp() + this.dataSource = appServer.AppDataSource + this.userService = new UserService() + this.organizationService = new OrganizationService() + this.roleService = new RoleService() + } + + public validateWorkspaceId(id: string | undefined) { + if (isInvalidUUID(id)) throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, WorkspaceErrorMessage.INVALID_WORKSPACE_ID) + } + + public async readWorkspaceById(id: string | undefined, queryRunner: QueryRunner) { + this.validateWorkspaceId(id) + return await queryRunner.manager.findOneBy(Workspace, { id }) + } + + public validateWorkspaceName(name: string | undefined, isRegister: boolean = false) { + if (isInvalidName(name)) throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, WorkspaceErrorMessage.INVALID_WORKSPACE_NAME) + if (!isRegister && (name === 
WorkspaceName.DEFAULT_PERSONAL_WORKSPACE || name === WorkspaceName.DEFAULT_WORKSPACE)) { + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, WorkspaceErrorMessage.WORKSPACE_RESERVERD_NAME) + } + } + + public async readWorkspaceByOrganizationId(organizationId: string | undefined, queryRunner: QueryRunner) { + await this.organizationService.readOrganizationById(organizationId, queryRunner) + const workspaces = await queryRunner.manager.findBy(Workspace, { organizationId }) + + const rolePersonalWorkspace = await this.roleService.readGeneralRoleByName(GeneralRole.PERSONAL_WORKSPACE, queryRunner) + if (!rolePersonalWorkspace) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, RoleErrorMessage.ROLE_NOT_FOUND) + + const filteredWorkspaces = await Promise.all( + workspaces.map(async (workspace) => { + const workspaceUsers = await queryRunner.manager.findBy(WorkspaceUser, { workspaceId: workspace.id }) + + // Skip if any user in the workspace has PERSONAL_WORKSPACE role + const hasPersonalWorkspaceUser = workspaceUsers.some((user) => user.roleId === rolePersonalWorkspace.id) + if (hasPersonalWorkspaceUser) { + return null + } + + return { + ...workspace, + userCount: workspaceUsers.length + } as Workspace & { userCount: number } + }) + ) + + // Filter out null values (personal workspaces) + return filteredWorkspaces.filter((workspace): workspace is Workspace & { userCount: number } => workspace !== null) + } + + public createNewWorkspace(data: Partial, queryRunner: QueryRunner, isRegister: boolean = false) { + this.validateWorkspaceName(data.name, isRegister) + data.updatedBy = data.createdBy + data.id = generateId() + + return queryRunner.manager.create(Workspace, data) + } + + public async saveWorkspace(data: Partial, queryRunner: QueryRunner) { + return await queryRunner.manager.save(Workspace, data) + } + + public async createWorkspace(data: Partial) { + const queryRunner = this.dataSource.createQueryRunner() + await queryRunner.connect() + + const 
organization = await this.organizationService.readOrganizationById(data.organizationId, queryRunner) + if (!organization) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, OrganizationErrorMessage.ORGANIZATION_NOT_FOUND) + const user = await this.userService.readUserById(data.createdBy, queryRunner) + if (!user) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + + let newWorkspace = this.createNewWorkspace(data, queryRunner) + try { + await queryRunner.startTransaction() + newWorkspace = await this.saveWorkspace(newWorkspace, queryRunner) + await queryRunner.commitTransaction() + } catch (error) { + await queryRunner.rollbackTransaction() + throw error + } finally { + await queryRunner.release() + } + + return newWorkspace + } + + public async updateWorkspace(newWorkspaceData: Partial) { + const queryRunner = this.dataSource.createQueryRunner() + await queryRunner.connect() + + const oldWorkspaceData = await this.readWorkspaceById(newWorkspaceData.id, queryRunner) + if (!oldWorkspaceData) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, WorkspaceErrorMessage.WORKSPACE_NOT_FOUND) + const user = await this.userService.readUserById(newWorkspaceData.updatedBy, queryRunner) + if (!user) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + if (newWorkspaceData.name) { + this.validateWorkspaceName(newWorkspaceData.name) + } + newWorkspaceData.organizationId = oldWorkspaceData.organizationId + newWorkspaceData.createdBy = oldWorkspaceData.createdBy + + let updateWorkspace = queryRunner.manager.merge(Workspace, oldWorkspaceData, newWorkspaceData) + try { + await queryRunner.startTransaction() + updateWorkspace = await this.saveWorkspace(updateWorkspace, queryRunner) + await queryRunner.commitTransaction() + } catch (error) { + await queryRunner.rollbackTransaction() + throw error + } finally { + await queryRunner.release() + } + + return updateWorkspace + } + + public async 
deleteWorkspaceById(queryRunner: QueryRunner, workspaceId: string) { + const workspace = await this.readWorkspaceById(workspaceId, queryRunner) + if (!workspace) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, WorkspaceErrorMessage.WORKSPACE_NOT_FOUND) + + // First get all related entities that need to be deleted + const chatflows = await queryRunner.manager.findBy(ChatFlow, { workspaceId }) + const documentStores = await queryRunner.manager.findBy(DocumentStore, { workspaceId }) + const evaluations = await queryRunner.manager.findBy(Evaluation, { workspaceId }) + const datasets = await queryRunner.manager.findBy(Dataset, { workspaceId }) + + // Extract IDs for bulk deletion + const chatflowIds = chatflows.map((cf) => cf.id) + const documentStoreIds = documentStores.map((ds) => ds.id) + const evaluationIds = evaluations.map((e) => e.id) + const datasetIds = datasets.map((d) => d.id) + + // Start deleting in the correct order to maintain referential integrity + await queryRunner.manager.delete(WorkspaceUser, { workspaceId }) + await queryRunner.manager.delete(ApiKey, { workspaceId }) + await queryRunner.manager.delete(Assistant, { workspaceId }) + await queryRunner.manager.delete(Execution, { workspaceId }) + + // Delete chatflow related entities + if (chatflowIds.length > 0) { + await queryRunner.manager.delete(ChatFlow, { workspaceId }) + await queryRunner.manager.delete(ChatMessageFeedback, { chatflowid: In(chatflowIds) }) + await queryRunner.manager.delete(ChatMessage, { chatflowid: In(chatflowIds) }) + await queryRunner.manager.delete(UpsertHistory, { chatflowid: In(chatflowIds) }) + } + + await queryRunner.manager.delete(Credential, { workspaceId }) + await queryRunner.manager.delete(CustomTemplate, { workspaceId }) + + // Delete dataset related entities + if (datasetIds.length > 0) { + await queryRunner.manager.delete(Dataset, { workspaceId }) + await queryRunner.manager.delete(DatasetRow, { datasetId: In(datasetIds) }) + } + + // Delete document store 
related entities + if (documentStoreIds.length > 0) { + await queryRunner.manager.delete(DocumentStore, { workspaceId }) + await queryRunner.manager.delete(DocumentStoreFileChunk, { storeId: In(documentStoreIds) }) + } + + // Delete evaluation related entities + if (evaluationIds.length > 0) { + await queryRunner.manager.delete(Evaluation, { workspaceId }) + await queryRunner.manager.delete(EvaluationRun, { evaluationId: In(evaluationIds) }) + } + + await queryRunner.manager.delete(Evaluator, { workspaceId }) + await queryRunner.manager.delete(Tool, { workspaceId }) + await queryRunner.manager.delete(Variable, { workspaceId }) + await queryRunner.manager.delete(WorkspaceShared, { workspaceId }) + + // Finally delete the workspace itself + await queryRunner.manager.delete(Workspace, { id: workspaceId }) + + return workspace + } + + public async getSharedWorkspacesForItem(itemId: string) { + const sharedWorkspaces = await this.dataSource.getRepository(WorkspaceShared).find({ + where: { + sharedItemId: itemId + } + }) + if (sharedWorkspaces.length === 0) { + return [] + } + + const workspaceIds = sharedWorkspaces.map((ws) => ws.workspaceId) + const workspaces = await this.dataSource.getRepository(Workspace).find({ + select: ['id', 'name'], + where: { id: In(workspaceIds) } + }) + + return sharedWorkspaces.map((sw) => { + const workspace = workspaces.find((w) => w.id === sw.workspaceId) + return { + workspaceId: sw.workspaceId, + workspaceName: workspace?.name, + sharedItemId: sw.sharedItemId, + itemType: sw.itemType + } + }) + } + + public async getSharedItemsForWorkspace(wsId: string, itemType: string) { + const sharedItems = await this.dataSource.getRepository(WorkspaceShared).find({ + where: { + workspaceId: wsId, + itemType: itemType + } + }) + if (sharedItems.length === 0) { + return [] + } + + const itemIds = sharedItems.map((item) => item.sharedItemId) + if (itemType === 'credential') { + return await this.dataSource.getRepository(Credential).find({ + select: 
['id', 'name', 'credentialName', 'createdDate', 'updatedDate', 'workspaceId'], + where: { id: In(itemIds) } + }) + } else if (itemType === 'custom_template') { + return await this.dataSource.getRepository(CustomTemplate).find({ + where: { id: In(itemIds) } + }) + } + return [] + } + + public async setSharedWorkspacesForItem(itemId: string, body: { itemType: string; workspaceIds: string[] }) { + const { itemType, workspaceIds } = body + + await this.dataSource.transaction(async (transactionalEntityManager: EntityManager) => { + // Delete existing shared workspaces for the item + await transactionalEntityManager.getRepository(WorkspaceShared).delete({ + sharedItemId: itemId + }) + + // Add new shared workspaces + const sharedWorkspaces = workspaceIds.map((workspaceId) => + transactionalEntityManager.getRepository(WorkspaceShared).create({ + workspaceId, + sharedItemId: itemId, + itemType + }) + ) + await transactionalEntityManager.getRepository(WorkspaceShared).save(sharedWorkspaces) + }) + + return { message: GeneralSuccessMessage.UPDATED } + } + + /** + * Updates all entities with null workspaceId to the specified workspaceId + * Used for migrating legacy data that was created before workspace implementation + * This function is guaranteed to return meaningful results with affected row counts + * @param queryRunner The TypeORM query runner to execute database operations + * @param workspaceId The target workspaceId to assign to records with null workspaceId + * @returns An array of update results, each containing the count of affected rows. 
+ * The array will always contain results for each entity type in the following order: + * [ApiKey, Assistant, ChatFlow, Credential, CustomTemplate, Dataset, DocumentStore, Evaluation, Evaluator, Tool, Variable] + */ + public async setNullWorkspaceId(queryRunner: QueryRunner, workspaceId: string): Promise { + return await Promise.all([ + queryRunner.manager.update(ApiKey, { workspaceId: IsNull() }, { workspaceId }), + queryRunner.manager.update(Assistant, { workspaceId: IsNull() }, { workspaceId }), + queryRunner.manager.update(ChatFlow, { workspaceId: IsNull() }, { workspaceId }), + queryRunner.manager.update(Credential, { workspaceId: IsNull() }, { workspaceId }), + queryRunner.manager.update(CustomTemplate, { workspaceId: IsNull() }, { workspaceId }), + queryRunner.manager.update(Dataset, { workspaceId: IsNull() }, { workspaceId }), + queryRunner.manager.update(DocumentStore, { workspaceId: IsNull() }, { workspaceId }), + queryRunner.manager.update(Evaluation, { workspaceId: IsNull() }, { workspaceId }), + queryRunner.manager.update(Evaluator, { workspaceId: IsNull() }, { workspaceId }), + queryRunner.manager.update(Execution, { workspaceId: IsNull() }, { workspaceId }), + queryRunner.manager.update(Tool, { workspaceId: IsNull() }, { workspaceId }), + queryRunner.manager.update(Variable, { workspaceId: IsNull() }, { workspaceId }) + ]) + } +} diff --git a/packages/server/src/enterprise/sso/Auth0SSO.ts b/packages/server/src/enterprise/sso/Auth0SSO.ts new file mode 100644 index 000000000..07a551f06 --- /dev/null +++ b/packages/server/src/enterprise/sso/Auth0SSO.ts @@ -0,0 +1,160 @@ +// Auth0SSO.ts +import SSOBase from './SSOBase' +import passport from 'passport' +import { Profile, Strategy as Auth0Strategy } from 'passport-auth0' +import { Request } from 'express' +import auditService from '../services/audit' +import { ErrorMessage, LoggedInUser, LoginActivityCode } from '../Interface.Enterprise' +import { setTokenOrCookies } from '../middleware/passport' +import 
axios from 'axios' + +const PROVIDER_NAME_AUTH0_SSO = 'Auth0 SSO' + +class Auth0SSO extends SSOBase { + static LOGIN_URI = '/api/v1/auth0/login' + static CALLBACK_URI = '/api/v1/auth0/callback' + static LOGOUT_URI = '/api/v1/auth0/logout' + + getProviderName(): string { + return PROVIDER_NAME_AUTH0_SSO + } + + static getCallbackURL(): string { + const APP_URL = process.env.APP_URL || 'http://127.0.0.1:' + process.env.PORT + return APP_URL + Auth0SSO.CALLBACK_URI + } + + setSSOConfig(ssoConfig: any) { + super.setSSOConfig(ssoConfig) + if (ssoConfig) { + const { domain, clientID, clientSecret } = this.ssoConfig + + passport.use( + 'auth0', + new Auth0Strategy( + { + domain: domain || 'your_auth0_domain', + clientID: clientID || 'your_auth0_client_id', + clientSecret: clientSecret || 'your_auth0_client_secret', + callbackURL: Auth0SSO.getCallbackURL() || 'http://localhost:3000/auth/auth0/callback', + passReqToCallback: true + }, + async ( + req: Request, + accessToken: string, + refreshToken: string, + extraParams: any, + profile: Profile, + done: (error: any, user?: any) => void + ) => { + const email = profile.emails?.[0]?.value + if (!email) { + await auditService.recordLoginActivity( + '', + LoginActivityCode.UNKNOWN_USER, + ErrorMessage.UNKNOWN_USER, + PROVIDER_NAME_AUTH0_SSO + ) + return done({ name: 'SSO_LOGIN_FAILED', message: ErrorMessage.UNKNOWN_USER }, undefined) + } + return await this.verifyAndLogin(this.app, email, done, profile, accessToken, refreshToken) + } + ) + ) + } else { + passport.unuse('auth0') + } + } + + initialize() { + this.setSSOConfig(this.ssoConfig) + + this.app.get(Auth0SSO.LOGIN_URI, (req, res, next?) => { + if (!this.getSSOConfig()) { + return res.status(400).json({ error: 'Auth0 SSO is not configured.' }) + } + passport.authenticate('auth0', { + scope: 'openid profile email' // Request scopes for profile and email information + })(req, res, next) + }) + + this.app.get(Auth0SSO.CALLBACK_URI, (req, res, next?) 
=> { + if (!this.getSSOConfig()) { + return res.status(400).json({ error: 'Auth0 SSO is not configured.' }) + } + passport.authenticate('auth0', async (err: any, user: LoggedInUser) => { + try { + if (err || !user) { + if (err?.name == 'SSO_LOGIN_FAILED') { + const error = { message: err.message } + const signinUrl = `/signin?error=${encodeURIComponent(JSON.stringify(error))}` + return res.redirect(signinUrl) + } + return next ? next(err) : res.status(401).json(err) + } + + req.session.regenerate((regenerateErr) => { + if (regenerateErr) { + return next ? next(regenerateErr) : res.status(500).json({ message: 'Session regeneration failed' }) + } + + req.login(user, { session: true }, async (error) => { + if (error) return next ? next(error) : res.status(401).json(error) + return setTokenOrCookies(res, user, true, req, true, true) + }) + }) + } catch (error) { + return next ? next(error) : res.status(401).json(error) + } + })(req, res, next) + }) + } + + static async testSetup(ssoConfig: any) { + const { domain, clientID, clientSecret } = ssoConfig + + try { + const tokenResponse = await axios.post( + `https://${domain}/oauth/token`, + { + client_id: clientID, + client_secret: clientSecret, + audience: `https://${domain}/api/v2/`, + grant_type: 'client_credentials' + }, + { + headers: { 'Content-Type': 'application/json' } + } + ) + return { message: tokenResponse.status } + } catch (error) { + const errorMessage = 'Auth0 Configuration test failed. Please check your credentials and domain.' 
+ return { error: errorMessage } + } + } + + async refreshToken(ssoRefreshToken: string) { + const { domain, clientID, clientSecret } = this.ssoConfig + + try { + const response = await axios.post( + `https://${domain}/oauth/token`, + { + client_id: clientID, + client_secret: clientSecret, + grant_type: 'refresh_token', + refresh_token: ssoRefreshToken + }, + { + headers: { 'Content-Type': 'application/json' } + } + ) + return { ...response.data } + } catch (error) { + const errorMessage = 'Failed to get refreshToken from Auth0.' + return { error: errorMessage } + } + } +} + +export default Auth0SSO diff --git a/packages/server/src/enterprise/sso/AzureSSO.ts b/packages/server/src/enterprise/sso/AzureSSO.ts new file mode 100644 index 000000000..35c6d744f --- /dev/null +++ b/packages/server/src/enterprise/sso/AzureSSO.ts @@ -0,0 +1,164 @@ +// AzureSSO.ts +import SSOBase from './SSOBase' +import passport from 'passport' +import { Profile, Strategy as OpenIDConnectStrategy, VerifyCallback } from 'passport-openidconnect' +import { Request } from 'express' +import auditService from '../services/audit' +import { ErrorMessage, LoggedInUser, LoginActivityCode } from '../Interface.Enterprise' +import { setTokenOrCookies } from '../middleware/passport' +import axios from 'axios' + +class AzureSSO extends SSOBase { + static LOGIN_URI = '/api/v1/azure/login' + static CALLBACK_URI = '/api/v1/azure/callback' + static LOGOUT_URI = '/api/v1/azure/logout' + + getProviderName(): string { + return 'Microsoft SSO' + } + + static getCallbackURL(): string { + const APP_URL = process.env.APP_URL || 'http://127.0.0.1:' + process.env.PORT + return APP_URL + AzureSSO.CALLBACK_URI + } + + initialize() { + this.setSSOConfig(this.ssoConfig) + + this.app.get(AzureSSO.LOGIN_URI, (req, res, next?) => { + if (!this.getSSOConfig()) { + return res.status(400).json({ error: 'Azure SSO is not configured.' 
}) + } + passport.authenticate('azure-ad', async () => { + if (next) next() + })(req, res, next) + }) + + this.app.get(AzureSSO.CALLBACK_URI, (req, res, next?) => { + if (!this.getSSOConfig()) { + return res.status(400).json({ error: 'Azure SSO is not configured.' }) + } + passport.authenticate('azure-ad', async (err: any, user: LoggedInUser) => { + try { + if (err || !user) { + if (err?.name == 'SSO_LOGIN_FAILED') { + const error = { message: err.message } + const signinUrl = `/signin?error=${encodeURIComponent(JSON.stringify(error))}` + return res.redirect(signinUrl) + } + return next ? next(err) : res.status(401).json(err) + } + + req.session.regenerate((regenerateErr) => { + if (regenerateErr) { + return next ? next(regenerateErr) : res.status(500).json({ message: 'Session regeneration failed' }) + } + + req.login(user, { session: true }, async (error) => { + if (error) return next ? next(error) : res.status(401).json(error) + return setTokenOrCookies(res, user, true, req, true, true) + }) + }) + } catch (error) { + return next ? 
next(error) : res.status(401).json(error) + } + })(req, res, next) + }) + } + + setSSOConfig(ssoConfig: any) { + super.setSSOConfig(ssoConfig) + if (this.ssoConfig) { + const { tenantID, clientID, clientSecret } = this.ssoConfig + passport.use( + 'azure-ad', + new OpenIDConnectStrategy( + { + issuer: `https://login.microsoftonline.com/${tenantID}/v2.0`, + authorizationURL: `https://login.microsoftonline.com/${tenantID}/oauth2/v2.0/authorize`, + tokenURL: `https://login.microsoftonline.com/${tenantID}/oauth2/v2.0/token`, + userInfoURL: `https://graph.microsoft.com/oidc/userinfo`, + clientID: clientID || 'your_client_id', + clientSecret: clientSecret || 'your_client_secret', + callbackURL: AzureSSO.getCallbackURL(), + scope: 'openid profile email offline_access', + passReqToCallback: true + }, + async ( + req: Request, + issuer: string, + profile: Profile, + context: object, + idToken: string | object, + accessToken: string | object, + refreshToken: string, + done: VerifyCallback + ) => { + const email = profile.username + if (!email) { + await auditService.recordLoginActivity( + '', + LoginActivityCode.UNKNOWN_USER, + ErrorMessage.UNKNOWN_USER, + this.getProviderName() + ) + return done({ name: 'SSO_LOGIN_FAILED', message: ErrorMessage.UNKNOWN_USER }, undefined) + } + return this.verifyAndLogin(this.app, email, done, profile, accessToken, refreshToken) + } + ) + ) + } else { + passport.unuse('azure-ad') + } + } + + static async testSetup(ssoConfig: any) { + const { tenantID, clientID, clientSecret } = ssoConfig + + try { + const tokenResponse = await axios.post( + `https://login.microsoftonline.com/${tenantID}/oauth2/v2.0/token`, + new URLSearchParams({ + client_id: clientID, + client_secret: clientSecret, + grant_type: 'client_credentials', + scope: 'https://graph.microsoft.com/.default' + }).toString(), + { + headers: { 'Content-Type': 'application/x-www-form-urlencoded' } + } + ) + return { message: tokenResponse.statusText } + } catch (error) { + const 
errorMessage = 'Microsoft Configuration test failed. Please check your credentials and Tenant ID.' + return { error: errorMessage } + } + } + + async refreshToken(ssoRefreshToken: string) { + const { tenantID, clientID, clientSecret } = this.ssoConfig + + try { + const response = await axios.post( + `https://login.microsoftonline.com/${tenantID}/oauth2/v2.0/token`, + new URLSearchParams({ + client_id: clientID || '', + client_secret: clientSecret || '', + grant_type: 'refresh_token', + refresh_token: ssoRefreshToken, + scope: 'openid profile email' + }).toString(), + { + headers: { 'Content-Type': 'application/x-www-form-urlencoded' } + } + ) + return { ...response.data } + } catch (error) { + const errorMessage = 'Failed to get refreshToken from Azure.' + return { error: errorMessage } + } + } +} + +export default AzureSSO diff --git a/packages/server/src/enterprise/sso/GithubSSO.ts b/packages/server/src/enterprise/sso/GithubSSO.ts new file mode 100644 index 000000000..f52745ab0 --- /dev/null +++ b/packages/server/src/enterprise/sso/GithubSSO.ts @@ -0,0 +1,158 @@ +import SSOBase from './SSOBase' +import passport from 'passport' +import { LoggedInUser } from '../Interface.Enterprise' +import { setTokenOrCookies } from '../middleware/passport' +import { Strategy as GitHubStrategy, Profile } from 'passport-github' + +class GithubSSO extends SSOBase { + static LOGIN_URI = '/api/v1/github/login' + static CALLBACK_URI = '/api/v1/github/callback' + static LOGOUT_URI = '/api/v1/github/logout' + + getProviderName(): string { + return 'Github SSO' + } + + static getCallbackURL(): string { + const APP_URL = process.env.APP_URL || 'http://127.0.0.1:' + process.env.PORT + return APP_URL + GithubSSO.CALLBACK_URI + } + + setSSOConfig(ssoConfig: any) { + super.setSSOConfig(ssoConfig) + if (this.ssoConfig) { + const clientID = this.ssoConfig.clientID + const clientSecret = this.ssoConfig.clientSecret + + // Configure Passport to use the GitHub strategy + passport.use( + new 
GitHubStrategy(
+                    {
+                        clientID: clientID,
+                        clientSecret: clientSecret,
+                        // Must be the absolute callback URL (APP_URL + path), matching the other SSO providers;
+                        // passing only the relative CALLBACK_URI sends GitHub a non-absolute redirect URI.
+                        callbackURL: GithubSSO.getCallbackURL(),
+                        scope: ['user:email']
+                    },
+                    async (accessToken: string, refreshToken: string, profile: Profile, done: any) => {
+                        // Fetch emails from GitHub API using the access token.
+                        const emailResponse = await fetch('https://api.github.com/user/emails', {
+                            headers: {
+                                Authorization: `token ${accessToken}`,
+                                'User-Agent': 'Node.js'
+                            }
+                        })
+                        const emails = await emailResponse.json()
+                        // Look for a verified primary email.
+                        let primaryEmail = emails.find((email: any) => email.primary && email.verified)?.email
+                        if (!primaryEmail && Array.isArray(emails) && emails.length > 0) {
+                            primaryEmail = emails[0].email
+                        }
+                        return this.verifyAndLogin(this.app, primaryEmail, done, profile, accessToken, refreshToken)
+                    }
+                )
+            )
+        } else {
+            passport.unuse('github')
+        }
+    }
+
+    initialize() {
+        if (this.ssoConfig) {
+            this.setSSOConfig(this.ssoConfig)
+        }
+
+        this.app.get(GithubSSO.LOGIN_URI, (req, res, next?) => {
+            if (!this.getSSOConfig()) {
+                return res.status(400).json({ error: 'Github SSO is not configured.' })
+            }
+            passport.authenticate('github', async () => {
+                if (next) next()
+            })(req, res, next)
+        })
+
+        this.app.get(GithubSSO.CALLBACK_URI, (req, res, next?) => {
+            passport.authenticate('github', async (err: any, user: LoggedInUser) => {
+                try {
+                    if (err || !user) {
+                        if (err?.name == 'SSO_LOGIN_FAILED') {
+                            const error = { message: err.message }
+                            const signinUrl = `/signin?error=${encodeURIComponent(JSON.stringify(error))}`
+                            return res.redirect(signinUrl)
+                        }
+                        return next ? next(err) : res.status(401).json(err)
+                    }
+
+                    req.session.regenerate((regenerateErr) => {
+                        if (regenerateErr) {
+                            return next ? next(regenerateErr) : res.status(500).json({ message: 'Session regeneration failed' })
+                        }
+
+                        req.login(user, { session: true }, async (error) => {
+                            if (error) return next ? 
next(error) : res.status(401).json(error) + return setTokenOrCookies(res, user, true, req, true, true) + }) + }) + } catch (error) { + return next ? next(error) : res.status(401).json(error) + } + })(req, res, next) + }) + } + + static async testSetup(ssoConfig: any) { + const { clientID, clientSecret } = ssoConfig + + try { + const response = await fetch('https://github.com/login/oauth/access_token', { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json' + }, + body: JSON.stringify({ + client_id: clientID, + client_secret: clientSecret, + code: 'dummy_code_for_testing' + }) + }) + const data = await response.json() + if (data.error === 'bad_verification_code') { + return { message: 'ClientID and clientSecret are valid.' } + } else { + return { error: `Invalid credentials. Received error: ${data.error || 'unknown'}` } + } + } catch (error) { + return { error: 'Github Configuration test failed. Please check your credentials.' } + } + } + + async refreshToken(currentRefreshToken: string) { + const { clientID, clientSecret } = this.ssoConfig + + try { + const response = await fetch('https://github.com/login/oauth/access_token', { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json' + }, + body: JSON.stringify({ + client_id: clientID, + client_secret: clientSecret, + grant_type: 'refresh_token', + refresh_token: currentRefreshToken + }) + }) + const data = await response.json() + if (data.error || !data.access_token) { + return { error: 'Failed to get refreshToken from Github.' } + } else { + return data + } + } catch (error) { + return { error: 'Failed to get refreshToken from Github.' 
} + } + } +} + +export default GithubSSO diff --git a/packages/server/src/enterprise/sso/GoogleSSO.ts b/packages/server/src/enterprise/sso/GoogleSSO.ts new file mode 100644 index 000000000..a4e3ae8ab --- /dev/null +++ b/packages/server/src/enterprise/sso/GoogleSSO.ts @@ -0,0 +1,162 @@ +// GoogleSSO.ts +import SSOBase from './SSOBase' +import passport from 'passport' +import { Profile, Strategy as OpenIDConnectStrategy, VerifyCallback } from 'passport-openidconnect' +import auditService from '../services/audit' +import { ErrorMessage, LoggedInUser, LoginActivityCode } from '../Interface.Enterprise' +import { setTokenOrCookies } from '../middleware/passport' +import axios from 'axios' + +class GoogleSSO extends SSOBase { + static LOGIN_URI = '/api/v1/google/login' + static CALLBACK_URI = '/api/v1/google/callback' + static LOGOUT_URI = '/api/v1/google/logout' + + getProviderName(): string { + return 'Google SSO' + } + + static getCallbackURL(): string { + const APP_URL = process.env.APP_URL || 'http://127.0.0.1:' + process.env.PORT + return APP_URL + GoogleSSO.CALLBACK_URI + } + + setSSOConfig(ssoConfig: any) { + super.setSSOConfig(ssoConfig) + if (this.ssoConfig) { + const clientID = this.ssoConfig.clientID + const clientSecret = this.ssoConfig.clientSecret + + passport.use( + 'google', + new OpenIDConnectStrategy( + { + issuer: 'https://accounts.google.com', + authorizationURL: 'https://accounts.google.com/o/oauth2/v2/auth', + tokenURL: 'https://oauth2.googleapis.com/token', + userInfoURL: 'https://openidconnect.googleapis.com/v1/userinfo', + clientID: clientID || 'your_google_client_id', + clientSecret: clientSecret || 'your_google_client_secret', + callbackURL: GoogleSSO.getCallbackURL() || 'http://localhost:3000/auth/google/callback', + scope: 'openid profile email' + }, + async ( + issuer: string, + profile: Profile, + context: object, + idToken: string | object, + accessToken: string | object, + refreshToken: string, + done: VerifyCallback + ) => { + if 
(profile.emails && profile.emails.length > 0) { + const email = profile.emails[0].value + return this.verifyAndLogin(this.app, email, done, profile, accessToken, refreshToken) + } else { + await auditService.recordLoginActivity( + '', + LoginActivityCode.UNKNOWN_USER, + ErrorMessage.UNKNOWN_USER, + this.getProviderName() + ) + return done({ name: 'SSO_LOGIN_FAILED', message: ErrorMessage.UNKNOWN_USER }, undefined) + } + } + ) + ) + } else { + passport.unuse('google') + } + } + + initialize() { + if (this.ssoConfig) { + this.setSSOConfig(this.ssoConfig) + } + + this.app.get(GoogleSSO.LOGIN_URI, (req, res, next?) => { + if (!this.getSSOConfig()) { + return res.status(400).json({ error: 'Google SSO is not configured.' }) + } + passport.authenticate('google', async () => { + if (next) next() + })(req, res, next) + }) + + this.app.get(GoogleSSO.CALLBACK_URI, (req, res, next?) => { + if (!this.getSSOConfig()) { + return res.status(400).json({ error: 'Google SSO is not configured.' }) + } + passport.authenticate('google', async (err: any, user: LoggedInUser) => { + try { + if (err || !user) { + if (err?.name == 'SSO_LOGIN_FAILED') { + const error = { message: err.message } + const signinUrl = `/signin?error=${encodeURIComponent(JSON.stringify(error))}` + return res.redirect(signinUrl) + } + return next ? next(err) : res.status(401).json(err) + } + + req.session.regenerate((regenerateErr) => { + if (regenerateErr) { + return next ? next(regenerateErr) : res.status(500).json({ message: 'Session regeneration failed' }) + } + + req.login(user, { session: true }, async (error) => { + if (error) return next ? next(error) : res.status(401).json(error) + return setTokenOrCookies(res, user, true, req, true, true) + }) + }) + } catch (error) { + return next ? 
next(error) : res.status(401).json(error)
+                }
+            })(req, res, next)
+        })
+    }
+
+    static async testSetup(ssoConfig: any) {
+        const { clientID, redirectURL } = ssoConfig
+
+        try {
+            const authorizationUrl = `https://accounts.google.com/o/oauth2/v2/auth?${new URLSearchParams({
+                client_id: clientID,
+                redirect_uri: redirectURL,
+                response_type: 'code',
+                scope: 'openid email profile'
+            }).toString()}`
+
+            const tokenResponse = await axios.get(authorizationUrl)
+            return { message: tokenResponse.statusText }
+        } catch (error) {
+            const errorMessage = 'Google Configuration test failed. Please check your credentials.'
+            return { error: errorMessage }
+        }
+    }
+
+    async refreshToken(ssoRefreshToken: string) {
+        const { clientID, clientSecret } = this.ssoConfig
+
+        try {
+            const response = await axios.post(
+                `https://oauth2.googleapis.com/token`,
+                new URLSearchParams({
+                    client_id: clientID || '',
+                    client_secret: clientSecret || '',
+                    grant_type: 'refresh_token',
+                    refresh_token: ssoRefreshToken,
+                    // Scope on a refresh_token grant must be (a subset of) the originally granted
+                    // scopes; 'refresh_token' is not a valid scope value.
+                    scope: 'openid profile email'
+                }).toString(),
+                {
+                    headers: { 'Content-Type': 'application/x-www-form-urlencoded' }
+                }
+            )
+            return { ...response.data }
+        } catch (error) {
+            const errorMessage = 'Failed to get refreshToken from Google.' 
+ return { error: errorMessage } + } + } +} + +export default GoogleSSO diff --git a/packages/server/src/enterprise/sso/SSOBase.ts b/packages/server/src/enterprise/sso/SSOBase.ts new file mode 100644 index 000000000..e216c6977 --- /dev/null +++ b/packages/server/src/enterprise/sso/SSOBase.ts @@ -0,0 +1,153 @@ +// SSOBase.ts +import express from 'express' +import passport from 'passport' +import { IAssignedWorkspace, LoggedInUser } from '../Interface.Enterprise' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' +import { UserErrorMessage, UserService } from '../services/user.service' +import { WorkspaceUserService } from '../services/workspace-user.service' +import { AccountService } from '../services/account.service' +import { WorkspaceUser } from '../database/entities/workspace-user.entity' +import { OrganizationService } from '../services/organization.service' +import { GeneralRole } from '../database/entities/role.entity' +import { RoleErrorMessage, RoleService } from '../services/role.service' +import { InternalFlowiseError } from '../../errors/internalFlowiseError' +import { StatusCodes } from 'http-status-codes' +import { Platform } from '../../Interface' +import { UserStatus } from '../database/entities/user.entity' + +abstract class SSOBase { + protected app: express.Application + protected ssoConfig: any + + constructor(app: express.Application, ssoConfig?: any) { + this.app = app + this.ssoConfig = ssoConfig + } + + setSSOConfig(ssoConfig: any) { + this.ssoConfig = ssoConfig + } + + getSSOConfig() { + return this.ssoConfig + } + + abstract getProviderName(): string + abstract initialize(): void + abstract refreshToken(ssoRefreshToken: string): Promise<{ [key: string]: any }> + async verifyAndLogin( + app: express.Application, + email: string, + done: (err?: Error | null, user?: Express.User, info?: any) => void, + profile: passport.Profile, + accessToken: string | object, + refreshToken: string + ) { + let queryRunner + const 
ssoProviderName = this.getProviderName() + try { + queryRunner = getRunningExpressApp().AppDataSource.createQueryRunner() + await queryRunner.connect() + + const userService = new UserService() + const organizationService = new OrganizationService() + const workspaceUserService = new WorkspaceUserService() + + let user: any = await userService.readUserByEmail(email, queryRunner) + let wu: any = {} + + if (!user) { + // In ENTERPRISE mode, we don't want to create a new user if the user is not found + if (getRunningExpressApp().identityManager.getPlatformType() === Platform.ENTERPRISE) { + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + } + // no user found, register the user + const data: any = { + user: { + email: email, + name: profile.displayName || email, + status: UserStatus.ACTIVE, + credential: undefined + } + } + if (getRunningExpressApp().identityManager.getPlatformType() === Platform.CLOUD) { + const accountService = new AccountService() + const newAccount = await accountService.register(data) + wu = newAccount.workspaceUser + wu.workspace = newAccount.workspace + user = newAccount.user + } + } else { + if (user.status === UserStatus.INVITED) { + const data: any = { + user: { + ...user, + email, + name: profile.displayName || '', + status: UserStatus.ACTIVE, + credential: undefined + } + } + const accountService = new AccountService() + const newAccount = await accountService.register(data) + user = newAccount.user + } + let wsUserOrUsers = await workspaceUserService.readWorkspaceUserByLastLogin(user?.id, queryRunner) + wu = Array.isArray(wsUserOrUsers) && wsUserOrUsers.length > 0 ? 
wsUserOrUsers[0] : (wsUserOrUsers as WorkspaceUser) + } + + const workspaceUser = wu as WorkspaceUser + let roleService = new RoleService() + const ownerRole = await roleService.readGeneralRoleByName(GeneralRole.OWNER, queryRunner) + const role = await roleService.readRoleById(workspaceUser.roleId, queryRunner) + if (!role) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, RoleErrorMessage.ROLE_NOT_FOUND) + + const workspaceUsers = await workspaceUserService.readWorkspaceUserByUserId(workspaceUser.userId, queryRunner) + const assignedWorkspaces: IAssignedWorkspace[] = workspaceUsers.map((workspaceUser) => { + return { + id: workspaceUser.workspace.id, + name: workspaceUser.workspace.name, + role: workspaceUser.role?.name, + organizationId: workspaceUser.workspace.organizationId + } as IAssignedWorkspace + }) + + const organization = await organizationService.readOrganizationById(workspaceUser.workspace.organizationId, queryRunner) + if (!organization) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, 'Organization not found') + const subscriptionId = organization.subscriptionId as string + const customerId = organization.customerId as string + const features = await getRunningExpressApp().identityManager.getFeaturesByPlan(subscriptionId) + const productId = await getRunningExpressApp().identityManager.getProductIdFromSubscription(subscriptionId) + + const loggedInUser: LoggedInUser = { + id: workspaceUser.userId, + email: user?.email || '', + name: user?.name || '', + roleId: workspaceUser.roleId, + activeOrganizationId: organization.id, + activeOrganizationSubscriptionId: subscriptionId, + activeOrganizationCustomerId: customerId, + activeOrganizationProductId: productId, + isOrganizationAdmin: workspaceUser.roleId === ownerRole?.id, + activeWorkspaceId: workspaceUser.workspaceId, + activeWorkspace: workspaceUser.workspace.name, + assignedWorkspaces, + ssoToken: accessToken as string, + ssoRefreshToken: refreshToken, + ssoProvider: ssoProviderName, + 
permissions: [...JSON.parse(role.permissions)], + features + } + return done(null, loggedInUser as Express.User, { message: 'Logged in Successfully' }) + } catch (error) { + return done( + { name: 'SSO_LOGIN_FAILED', message: ssoProviderName + ' Login failed! Please contact your administrator.' }, + undefined + ) + } finally { + if (queryRunner && !queryRunner.isReleased) await queryRunner.release() + } + } +} + +export default SSOBase diff --git a/packages/server/src/enterprise/utils/ControllerServiceUtils.ts b/packages/server/src/enterprise/utils/ControllerServiceUtils.ts new file mode 100644 index 000000000..1d0983c67 --- /dev/null +++ b/packages/server/src/enterprise/utils/ControllerServiceUtils.ts @@ -0,0 +1,19 @@ +import { Equal } from 'typeorm' +import { Request } from 'express' +import { InternalFlowiseError } from '../../errors/internalFlowiseError' +import { StatusCodes } from 'http-status-codes' + +export const getWorkspaceSearchOptions = (workspaceId?: string) => { + if (!workspaceId) { + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, `Workspace ID is required`) + } + return { workspaceId: Equal(workspaceId) } +} + +export const getWorkspaceSearchOptionsFromReq = (req: Request) => { + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, `Workspace ID is required`) + } + return { workspaceId: Equal(workspaceId) } +} diff --git a/packages/server/src/enterprise/utils/encryption.util.ts b/packages/server/src/enterprise/utils/encryption.util.ts new file mode 100644 index 000000000..60c2acc7e --- /dev/null +++ b/packages/server/src/enterprise/utils/encryption.util.ts @@ -0,0 +1,22 @@ +import bcrypt from 'bcryptjs' +import { AES, enc } from 'crypto-js' +import { getEncryptionKey } from '../../utils' + +export function getHash(value: string) { + const salt = bcrypt.genSaltSync(parseInt(process.env.PASSWORD_SALT_HASH_ROUNDS || '5')) + return bcrypt.hashSync(value, salt) +} + 
+export function compareHash(value1: string, value2: string) { + return bcrypt.compareSync(value1, value2) +} + +export async function encrypt(value: string) { + const encryptionKey = await getEncryptionKey() + return AES.encrypt(value, encryptionKey).toString() +} + +export async function decrypt(value: string) { + const encryptionKey = await getEncryptionKey() + return AES.decrypt(value, encryptionKey).toString(enc.Utf8) +} diff --git a/packages/server/src/enterprise/utils/sendEmail.ts b/packages/server/src/enterprise/utils/sendEmail.ts new file mode 100644 index 000000000..2292dd25f --- /dev/null +++ b/packages/server/src/enterprise/utils/sendEmail.ts @@ -0,0 +1,120 @@ +import * as handlebars from 'handlebars' +import nodemailer from 'nodemailer' +import fs from 'node:fs' +import path from 'path' +import { Platform } from '../../Interface' + +const SMTP_HOST = process.env.SMTP_HOST +const SMTP_PORT = parseInt(process.env.SMTP_PORT as string, 10) +const SMTP_USER = process.env.SMTP_USER +const SMTP_PASSWORD = process.env.SMTP_PASSWORD +const SENDER_EMAIL = process.env.SENDER_EMAIL +const SMTP_SECURE = process.env.SMTP_SECURE ? process.env.SMTP_SECURE === 'true' : true +const TLS = process.env.ALLOW_UNAUTHORIZED_CERTS ? { rejectUnauthorized: false } : undefined + +const transporter = nodemailer.createTransport({ + host: SMTP_HOST, + port: SMTP_PORT, + secure: SMTP_SECURE ?? 
true, + auth: { + user: SMTP_USER, + pass: SMTP_PASSWORD + }, + tls: TLS +}) + +const getEmailTemplate = (defaultTemplateName: string, userTemplatePath?: string) => { + try { + if (userTemplatePath) { + return fs.readFileSync(userTemplatePath, 'utf8') + } + } catch (error) { + console.warn(`Failed to load custom template from ${userTemplatePath}, falling back to default`) + } + return fs.readFileSync(path.join(__dirname, '../', 'emails', defaultTemplateName), 'utf8') +} + +const sendWorkspaceAdd = async (email: string, workspaceName: string, dashboardLink: string) => { + let htmlToSend + let textContent + + const template = getEmailTemplate('workspace_add_cloud.hbs', process.env.WORKSPACE_INVITE_TEMPLATE_PATH) + const compiledWorkspaceInviteTemplateSource = handlebars.compile(template) + htmlToSend = compiledWorkspaceInviteTemplateSource({ workspaceName, dashboardLink }) + textContent = `You have been added to ${workspaceName}. Click here to visit your dashboard: ${dashboardLink}` // plain text body + + await transporter.sendMail({ + from: SENDER_EMAIL || '"FlowiseAI Team" ', // sender address + to: email, + subject: `You have been added to ${workspaceName}`, // Subject line + text: textContent, // plain text body + html: htmlToSend // html body + }) +} + +const sendWorkspaceInvite = async ( + email: string, + workspaceName: string, + registerLink: string, + platform: Platform = Platform.ENTERPRISE, + inviteType: 'new' | 'update' = 'new' +) => { + let htmlToSend + let textContent + + const template = + platform === Platform.ENTERPRISE + ? getEmailTemplate( + inviteType === 'new' ? 'workspace_new_invite_enterprise.hbs' : 'workspace_update_invite_enterprise.hbs', + process.env.WORKSPACE_INVITE_TEMPLATE_PATH + ) + : getEmailTemplate( + inviteType === 'new' ? 
'workspace_new_invite_cloud.hbs' : 'workspace_update_invite_cloud.hbs', + process.env.WORKSPACE_INVITE_TEMPLATE_PATH + ) + const compiledWorkspaceInviteTemplateSource = handlebars.compile(template) + htmlToSend = compiledWorkspaceInviteTemplateSource({ workspaceName, registerLink }) + textContent = `You have been invited to ${workspaceName}. Click here to register: ${registerLink}` // plain text body + + await transporter.sendMail({ + from: SENDER_EMAIL || '"FlowiseAI Team" ', // sender address + to: email, + subject: `You have been invited to ${workspaceName}`, // Subject line + text: textContent, // plain text body + html: htmlToSend // html body + }) +} + +const sendPasswordResetEmail = async (email: string, resetLink: string) => { + const passwordResetTemplateSource = fs.readFileSync(path.join(__dirname, '../', 'emails', 'workspace_user_reset_password.hbs'), 'utf8') + const compiledPasswordResetTemplateSource = handlebars.compile(passwordResetTemplateSource) + + const htmlToSend = compiledPasswordResetTemplateSource({ resetLink }) + await transporter.sendMail({ + from: SENDER_EMAIL || '"FlowiseAI Team" ', // sender address + to: email, + subject: 'Reset your password', // Subject line + text: `You requested a link to reset your password. Click here to reset the password: ${resetLink}`, // plain text body + html: htmlToSend // html body + }) +} + +const sendVerificationEmailForCloud = async (email: string, verificationLink: string) => { + let htmlToSend + let textContent + + const template = getEmailTemplate('verify_email_cloud.hbs') + const compiledWorkspaceInviteTemplateSource = handlebars.compile(template) + htmlToSend = compiledWorkspaceInviteTemplateSource({ verificationLink }) + textContent = `To complete your registration, we need to verify your email address. 
Click here to verify your email address: ${verificationLink}` // plain text body + + await transporter.sendMail({ + from: SENDER_EMAIL || '"FlowiseAI Team" ', // sender address + to: email, + subject: 'Action Required: Please verify your email', // Subject line + text: textContent, // plain text body + html: htmlToSend // html body + }) +} + +export { sendWorkspaceAdd, sendWorkspaceInvite, sendPasswordResetEmail, sendVerificationEmailForCloud } diff --git a/packages/server/src/enterprise/utils/tempTokenUtils.ts b/packages/server/src/enterprise/utils/tempTokenUtils.ts new file mode 100644 index 000000000..6e6ba8b76 --- /dev/null +++ b/packages/server/src/enterprise/utils/tempTokenUtils.ts @@ -0,0 +1,102 @@ +import { LoggedInUser } from '../Interface.Enterprise' +import * as crypto from 'crypto' +import moment from 'moment' +import { customAlphabet } from 'nanoid' + +const nanoid = customAlphabet('0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz', 64) + +// Generate a copy of the users without their passwords. 
+export const generateSafeCopy = (user: Partial, deleteEmail?: boolean): any => { + let _user: any = { ...user } + delete _user.credential + delete _user.tempToken + delete _user.tokenExpiry + if (deleteEmail) { + delete _user.email + } + delete _user.workspaceIds + delete _user.ssoToken + delete _user.ssoRefreshToken + return _user +} + +export const generateTempToken = () => { + // generate a token with nanoid and return it + const token = nanoid() + return token +} + +// Encrypt token with password using crypto.Cipheriv +export const encryptToken = (stringToEncrypt: string) => { + const key = crypto + .createHash('sha256') + .update(process.env.TOKEN_HASH_SECRET || 'Secre$t') + .digest() + + const IV_LENGTH = 16 + const iv = crypto.randomBytes(IV_LENGTH) + const cipher = crypto.createCipheriv('aes-256-cbc', key, iv) + const encrypted = cipher.update(stringToEncrypt) + + const result = Buffer.concat([encrypted, cipher.final()]) + + // formatted string [iv]:[token] + return iv.toString('hex') + ':' + result.toString('hex') +} + +// Decrypt token using the inverse of encryption crypto algorithm +export const decryptToken = (stringToDecrypt: string): string | undefined => { + try { + const key = crypto + .createHash('sha256') + .update(process.env.TOKEN_HASH_SECRET || 'Secre$t') + .digest() + + let textParts = stringToDecrypt.split(':') + let iv = Buffer.from(textParts.shift() as string, 'hex') + let encryptedText = Buffer.from(textParts.join(':'), 'hex') + let decipher = crypto.createDecipheriv('aes-256-cbc', key, iv) + let decrypted = decipher.update(encryptedText) + + const result = Buffer.concat([decrypted, decipher.final()]) + + return result.toString() + } catch (error) { + return undefined + } +} + +// Extract userUUID from decrypted token string +export const getUserUUIDFromToken = (token: string): string | undefined => { + try { + const userUUIDHash = token.split('-')[2] + return Buffer.from(userUUIDHash, 'base64').toString('ascii') + } catch (error) { + 
return undefined + } +} + +export const isTokenValid = (tokenExpiry: Date, tokenType: TokenType): boolean => { + // Using moment.diff method for retrieve dates difference in hours + const tokenTimestampDate = moment(tokenExpiry) + const now = moment() + + if (tokenType === TokenType.INVITE) { + const expiryInHours = process.env.INVITE_TOKEN_EXPIRY_IN_HOURS ? parseInt(process.env.INVITE_TOKEN_EXPIRY_IN_HOURS) : 24 + // Fail if more than 24 hours + const diff = now.diff(tokenTimestampDate, 'hours') + if (Math.abs(diff) > expiryInHours) return false + } else if (tokenType === TokenType.PASSWORD_RESET) { + const expiryInMins = process.env.PASSWORD_RESET_TOKEN_EXPIRY_IN_MINUTES + ? parseInt(process.env.PASSWORD_RESET_TOKEN_EXPIRY_IN_MINUTES) + : 15 + const diff = now.diff(tokenTimestampDate, 'minutes') + if (Math.abs(diff) > expiryInMins) return false + } + return true +} + +export enum TokenType { + INVITE = 'INVITE', + PASSWORD_RESET = 'PASSWORD_RESET' +} diff --git a/packages/server/src/enterprise/utils/validation.util.ts b/packages/server/src/enterprise/utils/validation.util.ts new file mode 100644 index 000000000..c1738fc46 --- /dev/null +++ b/packages/server/src/enterprise/utils/validation.util.ts @@ -0,0 +1,33 @@ +export function isInvalidUUID(id: unknown): boolean { + const regexUUID = /^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i + return !id || typeof id !== 'string' || !regexUUID.test(id) +} + +export function isInvalidEmail(email: unknown): boolean { + const regexEmail = /^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$/ + return !email || typeof email !== 'string' || email.length > 255 || !regexEmail.test(email) +} + +export function isInvalidName(name: unknown): boolean { + return !name || typeof name !== 'string' || name.length > 100 +} + +export function isInvalidDateTime(dateTime: unknown): boolean { + const regexDateTime = /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d+)?(Z|[+-]\d{2}:\d{2})?$/ + return !dateTime || typeof 
dateTime !== 'string' || !regexDateTime.test(dateTime) +} + +export function isInvalidPassword(password: unknown): boolean { + // Minimum Length: At least 8 characters + // Maximum Length: No more than 128 characters + // Lowercase Letter: Must contain at least one lowercase letter (a-z) + // Uppercase Letter: Must contain at least one uppercase letter (A-Z) + // Digit: Must contain at least one number (0-9) + // Special Character: Must contain at least one special character (anything that's not a letter or number) + if (!password || typeof password !== 'string' || password.length > 128) { + return true + } + + const regexPassword = /^(?=.*[a-z])(?=.*[A-Z])(?=.*\d)(?=.*[^a-zA-Z0-9]).{8,}$/ + return !regexPassword.test(password) +} diff --git a/packages/server/src/index.ts b/packages/server/src/index.ts index fb7618b5f..258be4cbd 100644 --- a/packages/server/src/index.ts +++ b/packages/server/src/index.ts @@ -1,11 +1,10 @@ -import express from 'express' -import { Request, Response } from 'express' +import express, { Request, Response } from 'express' import path from 'path' import cors from 'cors' import http from 'http' -import basicAuth from 'express-basic-auth' -import { DataSource } from 'typeorm' -import { MODE } from './Interface' +import cookieParser from 'cookie-parser' +import { DataSource, IsNull } from 'typeorm' +import { MODE, Platform } from './Interface' import { getNodeModulesPackagePath, getEncryptionKey } from './utils' import logger, { expressRequestLogger } from './utils/logger' import { getDataSource } from './DataSource' @@ -14,23 +13,35 @@ import { ChatFlow } from './database/entities/ChatFlow' import { CachePool } from './CachePool' import { AbortControllerPool } from './AbortControllerPool' import { RateLimiterManager } from './utils/rateLimit' -import { getAPIKeys } from './utils/apiKey' -import { sanitizeMiddleware, getCorsOptions, getAllowedIframeOrigins } from './utils/XSS' +import { getAllowedIframeOrigins, getCorsOptions, 
sanitizeMiddleware } from './utils/XSS' import { Telemetry } from './utils/telemetry' import flowiseApiV1Router from './routes' import errorHandlerMiddleware from './middlewares/errors' +import { WHITELIST_URLS } from './utils/constants' +import { initializeJwtCookieMiddleware, verifyToken } from './enterprise/middleware/passport' +import { IdentityManager } from './IdentityManager' import { SSEStreamer } from './utils/SSEStreamer' import { validateAPIKey } from './utils/validateKey' +import { LoggedInUser } from './enterprise/Interface.Enterprise' import { IMetricsProvider } from './Interface.Metrics' import { Prometheus } from './metrics/Prometheus' import { OpenTelemetry } from './metrics/OpenTelemetry' import { QueueManager } from './queue/QueueManager' import { RedisEventSubscriber } from './queue/RedisEventSubscriber' -import { WHITELIST_URLS } from './utils/constants' import 'global-agent/bootstrap' +import { UsageCacheManager } from './UsageCacheManager' +import { Workspace } from './enterprise/database/entities/workspace.entity' +import { Organization } from './enterprise/database/entities/organization.entity' +import { GeneralRole, Role } from './enterprise/database/entities/role.entity' +import { migrateApiKeysFromJsonToDb } from './utils/apiKey' +import { ExpressAdapter } from '@bull-board/express' declare global { namespace Express { + interface User extends LoggedInUser {} + interface Request { + user?: LoggedInUser + } namespace Multer { interface File { bucket: string @@ -57,9 +68,12 @@ export class App { rateLimiterManager: RateLimiterManager AppDataSource: DataSource = getDataSource() sseStreamer: SSEStreamer + identityManager: IdentityManager metricsProvider: IMetricsProvider queueManager: QueueManager redisSubscriber: RedisEventSubscriber + usageCacheManager: UsageCacheManager + sessionStore: any constructor() { this.app = express() @@ -69,52 +83,75 @@ export class App { // Initialize database try { await this.AppDataSource.initialize() - 
logger.info('๐Ÿ“ฆ [server]: Data Source is initializing...') + logger.info('๐Ÿ“ฆ [server]: Data Source initialized successfully') // Run Migrations Scripts await this.AppDataSource.runMigrations({ transaction: 'each' }) + logger.info('๐Ÿ”„ [server]: Database migrations completed successfully') + + // Initialize Identity Manager + this.identityManager = await IdentityManager.getInstance() + logger.info('๐Ÿ” [server]: Identity Manager initialized successfully') // Initialize nodes pool this.nodesPool = new NodesPool() await this.nodesPool.initialize() + logger.info('๐Ÿ”ง [server]: Nodes pool initialized successfully') // Initialize abort controllers pool this.abortControllerPool = new AbortControllerPool() - - // Initialize API keys - await getAPIKeys() + logger.info('โน๏ธ [server]: Abort controllers pool initialized successfully') // Initialize encryption key await getEncryptionKey() + logger.info('๐Ÿ”‘ [server]: Encryption key initialized successfully') // Initialize Rate Limit this.rateLimiterManager = RateLimiterManager.getInstance() await this.rateLimiterManager.initializeRateLimiters(await getDataSource().getRepository(ChatFlow).find()) + logger.info('๐Ÿšฆ [server]: Rate limiters initialized successfully') // Initialize cache pool this.cachePool = new CachePool() + logger.info('๐Ÿ’พ [server]: Cache pool initialized successfully') + + // Initialize usage cache manager + this.usageCacheManager = await UsageCacheManager.getInstance() + logger.info('๐Ÿ“Š [server]: Usage cache manager initialized successfully') // Initialize telemetry this.telemetry = new Telemetry() + logger.info('๐Ÿ“ˆ [server]: Telemetry initialized successfully') // Initialize SSE Streamer this.sseStreamer = new SSEStreamer() + logger.info('๐ŸŒŠ [server]: SSE Streamer initialized successfully') // Init Queues if (process.env.MODE === MODE.QUEUE) { this.queueManager = QueueManager.getInstance() + const serverAdapter = new ExpressAdapter() + serverAdapter.setBasePath('/admin/queues') 
this.queueManager.setupAllQueues({ componentNodes: this.nodesPool.componentNodes, telemetry: this.telemetry, cachePool: this.cachePool, appDataSource: this.AppDataSource, - abortControllerPool: this.abortControllerPool + abortControllerPool: this.abortControllerPool, + usageCacheManager: this.usageCacheManager, + serverAdapter }) + logger.info('โœ… [Queue]: All queues setup successfully') + this.redisSubscriber = new RedisEventSubscriber(this.sseStreamer) await this.redisSubscriber.connect() + logger.info('๐Ÿ”— [server]: Redis event subscriber connected successfully') } - logger.info('๐Ÿ“ฆ [server]: Data Source has been initialized!') + // TODO: Remove this by end of 2025 + await migrateApiKeysFromJsonToDb(this.AppDataSource, this.identityManager.getPlatformType()) + + logger.info('๐ŸŽ‰ [server]: All initialization steps completed successfully!') } catch (error) { logger.error('โŒ [server]: Error during Data Source initialization:', error) } @@ -125,12 +162,28 @@ export class App { const flowise_file_size_limit = process.env.FLOWISE_FILE_SIZE_LIMIT || '50mb' this.app.use(express.json({ limit: flowise_file_size_limit })) this.app.use(express.urlencoded({ limit: flowise_file_size_limit, extended: true })) - if (process.env.NUMBER_OF_PROXIES && parseInt(process.env.NUMBER_OF_PROXIES) > 0) - this.app.set('trust proxy', parseInt(process.env.NUMBER_OF_PROXIES)) + + // Enhanced trust proxy settings for load balancer + let trustProxy: string | boolean | number | undefined = process.env.TRUST_PROXY + if (typeof trustProxy === 'undefined' || trustProxy.trim() === '' || trustProxy === 'true') { + // Default to trust all proxies + trustProxy = true + } else if (trustProxy === 'false') { + // Disable trust proxy + trustProxy = false + } else if (!isNaN(Number(trustProxy))) { + // Number: Trust specific number of proxies + trustProxy = Number(trustProxy) + } + + this.app.set('trust proxy', trustProxy) // Allow access from specified domains this.app.use(cors(getCorsOptions())) + 
// Parse cookies + this.app.use(cookieParser()) + // Allow embedding from specified domains. this.app.use((req, res, next) => { const allowedOrigins = getAllowedIframeOrigins() @@ -152,70 +205,96 @@ export class App { // Add the sanitizeMiddleware to guard against XSS this.app.use(sanitizeMiddleware) - const whitelistURLs = WHITELIST_URLS + this.app.use((req, res, next) => { + res.header('Access-Control-Allow-Credentials', 'true') // Allow credentials (cookies, etc.) + if (next) next() + }) + + const denylistURLs = process.env.DENYLIST_URLS ? process.env.DENYLIST_URLS.split(',') : [] + const whitelistURLs = WHITELIST_URLS.filter((url) => !denylistURLs.includes(url)) const URL_CASE_INSENSITIVE_REGEX: RegExp = /\/api\/v1\//i const URL_CASE_SENSITIVE_REGEX: RegExp = /\/api\/v1\// - if (process.env.FLOWISE_USERNAME && process.env.FLOWISE_PASSWORD) { - const username = process.env.FLOWISE_USERNAME - const password = process.env.FLOWISE_PASSWORD - const basicAuthMiddleware = basicAuth({ - users: { [username]: password } - }) - this.app.use(async (req, res, next) => { - // Step 1: Check if the req path contains /api/v1 regardless of case - if (URL_CASE_INSENSITIVE_REGEX.test(req.path)) { - // Step 2: Check if the req path is case sensitive - if (URL_CASE_SENSITIVE_REGEX.test(req.path)) { - // Step 3: Check if the req path is in the whitelist - const isWhitelisted = whitelistURLs.some((url) => req.path.startsWith(url)) - if (isWhitelisted) { - next() - } else if (req.headers['x-request-from'] === 'internal') { - basicAuthMiddleware(req, res, next) - } else { - const isKeyValidated = await validateAPIKey(req) - if (!isKeyValidated) { + await initializeJwtCookieMiddleware(this.app, this.identityManager) + + this.app.use(async (req, res, next) => { + // Step 1: Check if the req path contains /api/v1 regardless of case + if (URL_CASE_INSENSITIVE_REGEX.test(req.path)) { + // Step 2: Check if the req path is casesensitive + if (URL_CASE_SENSITIVE_REGEX.test(req.path)) { + // 
Step 3: Check if the req path is in the whitelist + const isWhitelisted = whitelistURLs.some((url) => req.path.startsWith(url)) + if (isWhitelisted) { + next() + } else if (req.headers['x-request-from'] === 'internal') { + verifyToken(req, res, next) + } else { + // Only check license validity for non-open-source platforms + if (this.identityManager.getPlatformType() !== Platform.OPEN_SOURCE) { + if (!this.identityManager.isLicenseValid()) { return res.status(401).json({ error: 'Unauthorized Access' }) } - next() } - } else { - return res.status(401).json({ error: 'Unauthorized Access' }) + + const { isValid, workspaceId: apiKeyWorkSpaceId } = await validateAPIKey(req) + if (!isValid) { + return res.status(401).json({ error: 'Unauthorized Access' }) + } + + // Find workspace + const workspace = await this.AppDataSource.getRepository(Workspace).findOne({ + where: { id: apiKeyWorkSpaceId } + }) + if (!workspace) { + return res.status(401).json({ error: 'Unauthorized Access' }) + } + + // Find owner role + const ownerRole = await this.AppDataSource.getRepository(Role).findOne({ + where: { name: GeneralRole.OWNER, organizationId: IsNull() } + }) + if (!ownerRole) { + return res.status(401).json({ error: 'Unauthorized Access' }) + } + + // Find organization + const activeOrganizationId = workspace.organizationId as string + const org = await this.AppDataSource.getRepository(Organization).findOne({ + where: { id: activeOrganizationId } + }) + if (!org) { + return res.status(401).json({ error: 'Unauthorized Access' }) + } + const subscriptionId = org.subscriptionId as string + const customerId = org.customerId as string + const features = await this.identityManager.getFeaturesByPlan(subscriptionId) + const productId = await this.identityManager.getProductIdFromSubscription(subscriptionId) + + // @ts-ignore + req.user = { + permissions: [...JSON.parse(ownerRole.permissions)], + features, + activeOrganizationId: activeOrganizationId, + activeOrganizationSubscriptionId: 
subscriptionId, + activeOrganizationCustomerId: customerId, + activeOrganizationProductId: productId, + isOrganizationAdmin: true, + activeWorkspaceId: apiKeyWorkSpaceId!, + activeWorkspace: workspace.name + } + next() } } else { - // If the req path does not contain /api/v1, then allow the request to pass through, example: /assets, /canvas - next() + return res.status(401).json({ error: 'Unauthorized Access' }) } - }) - } else { - this.app.use(async (req, res, next) => { - // Step 1: Check if the req path contains /api/v1 regardless of case - if (URL_CASE_INSENSITIVE_REGEX.test(req.path)) { - // Step 2: Check if the req path is case sensitive - if (URL_CASE_SENSITIVE_REGEX.test(req.path)) { - // Step 3: Check if the req path is in the whitelist - const isWhitelisted = whitelistURLs.some((url) => req.path.startsWith(url)) - if (isWhitelisted) { - next() - } else if (req.headers['x-request-from'] === 'internal') { - next() - } else { - const isKeyValidated = await validateAPIKey(req) - if (!isKeyValidated) { - return res.status(401).json({ error: 'Unauthorized Access' }) - } - next() - } - } else { - return res.status(401).json({ error: 'Unauthorized Access' }) - } - } else { - // If the req path does not contain /api/v1, then allow the request to pass through, example: /assets, /canvas - next() - } - }) - } + } else { + // If the req path does not contain /api/v1, then allow the request to pass through, example: /assets, /canvas + next() + } + }) + + // this is for SSO and must be after the JWT cookie middleware + await this.identityManager.initializeSSO(this.app) if (process.env.ENABLE_METRICS === 'true') { switch (process.env.METRICS_PROVIDER) { @@ -251,7 +330,7 @@ export class App { }) }) - if (process.env.MODE === MODE.QUEUE && process.env.ENABLE_BULLMQ_DASHBOARD === 'true') { + if (process.env.MODE === MODE.QUEUE && process.env.ENABLE_BULLMQ_DASHBOARD === 'true' && !this.identityManager.isCloud()) { this.app.use('/admin/queues', 
this.queueManager.getBullBoardRouter()) } diff --git a/packages/server/src/middlewares/errors/index.ts b/packages/server/src/middlewares/errors/index.ts index 88b3dd80c..44d56abe6 100644 --- a/packages/server/src/middlewares/errors/index.ts +++ b/packages/server/src/middlewares/errors/index.ts @@ -5,15 +5,17 @@ import { InternalFlowiseError } from '../../errors/internalFlowiseError' // we need eslint because we have to pass next arg for the error middleware // eslint-disable-next-line async function errorHandlerMiddleware(err: InternalFlowiseError, req: Request, res: Response, next: NextFunction) { + const statusCode = err.statusCode || StatusCodes.INTERNAL_SERVER_ERROR if (err.message.includes('401 Incorrect API key provided')) err.message = '401 Invalid model key or Incorrect local model configuration.' let displayedError = { - statusCode: err.statusCode || StatusCodes.INTERNAL_SERVER_ERROR, + statusCode, success: false, message: err.message, // Provide error stack trace only in development stack: process.env.NODE_ENV === 'development' ? 
err.stack : {} } + if (!req.body || !req.body.streaming || req.body.streaming === 'false') { res.setHeader('Content-Type', 'application/json') res.status(displayedError.statusCode).json(displayedError) diff --git a/packages/server/src/queue/BaseQueue.ts b/packages/server/src/queue/BaseQueue.ts index d3bf18d29..87b3a9fae 100644 --- a/packages/server/src/queue/BaseQueue.ts +++ b/packages/server/src/queue/BaseQueue.ts @@ -57,22 +57,53 @@ export abstract class BaseQueue { } public createWorker(concurrency: number = WORKER_CONCURRENCY): Worker { - this.worker = new Worker( - this.queue.name, - async (job: Job) => { - const start = new Date().getTime() - logger.info(`Processing job ${job.id} in ${this.queue.name} at ${new Date().toISOString()}`) - const result = await this.processJob(job.data) - const end = new Date().getTime() - logger.info(`Completed job ${job.id} in ${this.queue.name} at ${new Date().toISOString()} (${end - start}ms)`) - return result - }, - { - connection: this.connection, - concurrency - } - ) - return this.worker + try { + this.worker = new Worker( + this.queue.name, + async (job: Job) => { + const start = new Date().getTime() + logger.info(`[BaseQueue] Processing job ${job.id} in ${this.queue.name} at ${new Date().toISOString()}`) + try { + const result = await this.processJob(job.data) + const end = new Date().getTime() + logger.info( + `[BaseQueue] Completed job ${job.id} in ${this.queue.name} at ${new Date().toISOString()} (${end - start}ms)` + ) + return result + } catch (error) { + const end = new Date().getTime() + logger.error( + `[BaseQueue] Job ${job.id} failed in ${this.queue.name} at ${new Date().toISOString()} (${end - start}ms):`, + { error } + ) + throw error + } + }, + { + connection: this.connection, + concurrency + } + ) + + // Add error listeners to the worker + this.worker.on('error', (err) => { + logger.error(`[BaseQueue] Worker error for queue "${this.queue.name}":`, { error: err }) + }) + + this.worker.on('closed', () => { + 
logger.info(`[BaseQueue] Worker closed for queue "${this.queue.name}"`) + }) + + this.worker.on('failed', (job, err) => { + logger.error(`[BaseQueue] Worker job ${job?.id} failed in queue "${this.queue.name}":`, { error: err }) + }) + + logger.info(`[BaseQueue] Worker created successfully for queue "${this.queue.name}"`) + return this.worker + } catch (error) { + logger.error(`[BaseQueue] Failed to create worker for queue "${this.queue.name}":`, { error }) + throw error + } } public async getJobs(): Promise { diff --git a/packages/server/src/queue/PredictionQueue.ts b/packages/server/src/queue/PredictionQueue.ts index 2b5575712..10cc125f7 100644 --- a/packages/server/src/queue/PredictionQueue.ts +++ b/packages/server/src/queue/PredictionQueue.ts @@ -7,6 +7,7 @@ import { RedisEventPublisher } from './RedisEventPublisher' import { AbortControllerPool } from '../AbortControllerPool' import { BaseQueue } from './BaseQueue' import { RedisOptions } from 'bullmq' +import { UsageCacheManager } from '../UsageCacheManager' import logger from '../utils/logger' import { generateAgentflowv2 as generateAgentflowv2_json } from 'flowise-components' import { databaseEntities } from '../utils' @@ -18,6 +19,7 @@ interface PredictionQueueOptions { cachePool: CachePool componentNodes: IComponentNodes abortControllerPool: AbortControllerPool + usageCacheManager: UsageCacheManager } interface IGenerateAgentflowv2Params extends IExecuteFlowParams { @@ -35,6 +37,7 @@ export class PredictionQueue extends BaseQueue { private cachePool: CachePool private appDataSource: DataSource private abortControllerPool: AbortControllerPool + private usageCacheManager: UsageCacheManager private redisPublisher: RedisEventPublisher private queueName: string @@ -46,6 +49,7 @@ export class PredictionQueue extends BaseQueue { this.cachePool = options.cachePool this.appDataSource = options.appDataSource this.abortControllerPool = options.abortControllerPool + this.usageCacheManager = options.usageCacheManager 
this.redisPublisher = new RedisEventPublisher() this.redisPublisher.connect() } @@ -62,6 +66,7 @@ export class PredictionQueue extends BaseQueue { if (this.appDataSource) data.appDataSource = this.appDataSource if (this.telemetry) data.telemetry = this.telemetry if (this.cachePool) data.cachePool = this.cachePool + if (this.usageCacheManager) data.usageCacheManager = this.usageCacheManager if (this.componentNodes) data.componentNodes = this.componentNodes if (this.redisPublisher) data.sseStreamer = this.redisPublisher @@ -78,11 +83,13 @@ export class PredictionQueue extends BaseQueue { if (Object.prototype.hasOwnProperty.call(data, 'isExecuteCustomFunction')) { const executeCustomFunctionData = data as any - logger.info(`Executing Custom Function...`) + logger.info(`[${executeCustomFunctionData.orgId}]: Executing Custom Function...`) return await executeCustomNodeFunction({ appDataSource: this.appDataSource, componentNodes: this.componentNodes, - data: executeCustomFunctionData.data + data: executeCustomFunctionData.data, + workspaceId: executeCustomFunctionData.workspaceId, + orgId: executeCustomFunctionData.orgId }) } diff --git a/packages/server/src/queue/QueueManager.ts b/packages/server/src/queue/QueueManager.ts index abd657ac6..eef90b33b 100644 --- a/packages/server/src/queue/QueueManager.ts +++ b/packages/server/src/queue/QueueManager.ts @@ -7,9 +7,11 @@ import { CachePool } from '../CachePool' import { DataSource } from 'typeorm' import { AbortControllerPool } from '../AbortControllerPool' import { QueueEventsProducer, RedisOptions } from 'bullmq' -import { createBullBoard } from 'bull-board' -import { BullMQAdapter } from 'bull-board/bullMQAdapter' +import { createBullBoard } from '@bull-board/api' +import { BullMQAdapter } from '@bull-board/api/bullMQAdapter' import { Express } from 'express' +import { UsageCacheManager } from '../UsageCacheManager' +import { ExpressAdapter } from '@bull-board/express' const QUEUE_NAME = process.env.QUEUE_NAME || 
'flowise-queue' @@ -23,30 +25,49 @@ export class QueueManager { private predictionQueueEventsProducer?: QueueEventsProducer private constructor() { - let tlsOpts = undefined - if (process.env.REDIS_URL && process.env.REDIS_URL.startsWith('rediss://')) { - tlsOpts = { - rejectUnauthorized: false + if (process.env.REDIS_URL) { + let tlsOpts = undefined + if (process.env.REDIS_URL.startsWith('rediss://')) { + tlsOpts = { + rejectUnauthorized: false + } + } else if (process.env.REDIS_TLS === 'true') { + tlsOpts = { + cert: process.env.REDIS_CERT ? Buffer.from(process.env.REDIS_CERT, 'base64') : undefined, + key: process.env.REDIS_KEY ? Buffer.from(process.env.REDIS_KEY, 'base64') : undefined, + ca: process.env.REDIS_CA ? Buffer.from(process.env.REDIS_CA, 'base64') : undefined + } } - } else if (process.env.REDIS_TLS === 'true') { - tlsOpts = { - cert: process.env.REDIS_CERT ? Buffer.from(process.env.REDIS_CERT, 'base64') : undefined, - key: process.env.REDIS_KEY ? Buffer.from(process.env.REDIS_KEY, 'base64') : undefined, - ca: process.env.REDIS_CA ? Buffer.from(process.env.REDIS_CA, 'base64') : undefined + this.connection = { + url: process.env.REDIS_URL, + tls: tlsOpts, + enableReadyCheck: true, + keepAlive: + process.env.REDIS_KEEP_ALIVE && !isNaN(parseInt(process.env.REDIS_KEEP_ALIVE, 10)) + ? parseInt(process.env.REDIS_KEEP_ALIVE, 10) + : undefined + } + } else { + let tlsOpts = undefined + if (process.env.REDIS_TLS === 'true') { + tlsOpts = { + cert: process.env.REDIS_CERT ? Buffer.from(process.env.REDIS_CERT, 'base64') : undefined, + key: process.env.REDIS_KEY ? Buffer.from(process.env.REDIS_KEY, 'base64') : undefined, + ca: process.env.REDIS_CA ? 
Buffer.from(process.env.REDIS_CA, 'base64') : undefined + } + } + this.connection = { + host: process.env.REDIS_HOST || 'localhost', + port: parseInt(process.env.REDIS_PORT || '6379'), + username: process.env.REDIS_USERNAME || undefined, + password: process.env.REDIS_PASSWORD || undefined, + tls: tlsOpts, + enableReadyCheck: true, + keepAlive: + process.env.REDIS_KEEP_ALIVE && !isNaN(parseInt(process.env.REDIS_KEEP_ALIVE, 10)) + ? parseInt(process.env.REDIS_KEEP_ALIVE, 10) + : undefined } - } - this.connection = { - url: process.env.REDIS_URL || undefined, - host: process.env.REDIS_HOST || 'localhost', - port: parseInt(process.env.REDIS_PORT || '6379'), - username: process.env.REDIS_USERNAME || undefined, - password: process.env.REDIS_PASSWORD || undefined, - tls: tlsOpts, - enableReadyCheck: true, - keepAlive: - process.env.REDIS_KEEP_ALIVE && !isNaN(parseInt(process.env.REDIS_KEEP_ALIVE, 10)) - ? parseInt(process.env.REDIS_KEEP_ALIVE, 10) - : undefined } } @@ -96,13 +117,17 @@ export class QueueManager { telemetry, cachePool, appDataSource, - abortControllerPool + abortControllerPool, + usageCacheManager, + serverAdapter }: { componentNodes: IComponentNodes telemetry: Telemetry cachePool: CachePool appDataSource: DataSource abortControllerPool: AbortControllerPool + usageCacheManager: UsageCacheManager + serverAdapter?: ExpressAdapter }) { const predictionQueueName = `${QUEUE_NAME}-prediction` const predictionQueue = new PredictionQueue(predictionQueueName, this.connection, { @@ -110,9 +135,11 @@ export class QueueManager { telemetry, cachePool, appDataSource, - abortControllerPool + abortControllerPool, + usageCacheManager }) this.registerQueue('prediction', predictionQueue) + this.predictionQueueEventsProducer = new QueueEventsProducer(predictionQueue.getQueueName(), { connection: this.connection }) @@ -122,11 +149,17 @@ export class QueueManager { componentNodes, telemetry, cachePool, - appDataSource + appDataSource, + usageCacheManager }) 
this.registerQueue('upsert', upsertionQueue) - const bullboard = createBullBoard([new BullMQAdapter(predictionQueue.getQueue()), new BullMQAdapter(upsertionQueue.getQueue())]) - this.bullBoardRouter = bullboard.router + if (serverAdapter) { + createBullBoard({ + queues: [new BullMQAdapter(predictionQueue.getQueue()), new BullMQAdapter(upsertionQueue.getQueue())], + serverAdapter: serverAdapter + }) + this.bullBoardRouter = serverAdapter.getRouter() + } } } diff --git a/packages/server/src/queue/RedisEventPublisher.ts b/packages/server/src/queue/RedisEventPublisher.ts index c0fce60c5..cb8aa6ecf 100644 --- a/packages/server/src/queue/RedisEventPublisher.ts +++ b/packages/server/src/queue/RedisEventPublisher.ts @@ -1,5 +1,6 @@ import { IServerSideEventStreamer } from 'flowise-components' import { createClient } from 'redis' +import logger from '../utils/logger' export class RedisEventPublisher implements IServerSideEventStreamer { private redisPublisher: ReturnType @@ -13,7 +14,11 @@ export class RedisEventPublisher implements IServerSideEventStreamer { process.env.REDIS_KEEP_ALIVE && !isNaN(parseInt(process.env.REDIS_KEEP_ALIVE, 10)) ? parseInt(process.env.REDIS_KEEP_ALIVE, 10) : undefined - } + }, + pingInterval: + process.env.REDIS_KEEP_ALIVE && !isNaN(parseInt(process.env.REDIS_KEEP_ALIVE, 10)) + ? parseInt(process.env.REDIS_KEEP_ALIVE, 10) + : undefined }) } else { this.redisPublisher = createClient({ @@ -30,9 +35,45 @@ export class RedisEventPublisher implements IServerSideEventStreamer { process.env.REDIS_KEEP_ALIVE && !isNaN(parseInt(process.env.REDIS_KEEP_ALIVE, 10)) ? parseInt(process.env.REDIS_KEEP_ALIVE, 10) : undefined - } + }, + pingInterval: + process.env.REDIS_KEEP_ALIVE && !isNaN(parseInt(process.env.REDIS_KEEP_ALIVE, 10)) + ? 
parseInt(process.env.REDIS_KEEP_ALIVE, 10) + : undefined }) } + + this.setupEventListeners() + } + + private setupEventListeners() { + this.redisPublisher.on('connect', () => { + logger.info(`[RedisEventPublisher] Redis client connecting...`) + }) + + this.redisPublisher.on('ready', () => { + logger.info(`[RedisEventPublisher] Redis client ready and connected`) + }) + + this.redisPublisher.on('error', (err) => { + logger.error(`[RedisEventPublisher] Redis client error:`, { + error: err, + isReady: this.redisPublisher.isReady, + isOpen: this.redisPublisher.isOpen + }) + }) + + this.redisPublisher.on('end', () => { + logger.warn(`[RedisEventPublisher] Redis client connection ended`) + }) + + this.redisPublisher.on('reconnecting', () => { + logger.info(`[RedisEventPublisher] Redis client reconnecting...`) + }) + } + + isConnected() { + return this.redisPublisher.isReady } async connect() { @@ -339,6 +380,70 @@ export class RedisEventPublisher implements IServerSideEventStreamer { } } + streamTTSStartEvent(chatId: string, chatMessageId: string, format: string): void { + try { + this.redisPublisher.publish( + chatId, + JSON.stringify({ + chatId, + chatMessageId, + eventType: 'tts_start', + data: { format } + }) + ) + } catch (error) { + console.error('Error streaming TTS start event:', error) + } + } + + streamTTSDataEvent(chatId: string, chatMessageId: string, audioChunk: string): void { + try { + this.redisPublisher.publish( + chatId, + JSON.stringify({ + chatId, + chatMessageId, + eventType: 'tts_data', + data: audioChunk + }) + ) + } catch (error) { + console.error('Error streaming TTS data event:', error) + } + } + + streamTTSEndEvent(chatId: string, chatMessageId: string): void { + try { + this.redisPublisher.publish( + chatId, + JSON.stringify({ + chatId, + chatMessageId, + eventType: 'tts_end', + data: {} + }) + ) + } catch (error) { + console.error('Error streaming TTS end event:', error) + } + } + + streamTTSAbortEvent(chatId: string, chatMessageId: string): 
void { + try { + this.redisPublisher.publish( + chatId, + JSON.stringify({ + chatId, + chatMessageId, + eventType: 'tts_abort', + data: {} + }) + ) + } catch (error) { + console.error('Error streaming TTS abort event:', error) + } + } + async disconnect() { if (this.redisPublisher) { await this.redisPublisher.quit() diff --git a/packages/server/src/queue/RedisEventSubscriber.ts b/packages/server/src/queue/RedisEventSubscriber.ts index 5b0331a72..c70d6f732 100644 --- a/packages/server/src/queue/RedisEventSubscriber.ts +++ b/packages/server/src/queue/RedisEventSubscriber.ts @@ -1,5 +1,6 @@ import { createClient } from 'redis' import { SSEStreamer } from '../utils/SSEStreamer' +import logger from '../utils/logger' export class RedisEventSubscriber { private redisSubscriber: ReturnType @@ -15,7 +16,11 @@ export class RedisEventSubscriber { process.env.REDIS_KEEP_ALIVE && !isNaN(parseInt(process.env.REDIS_KEEP_ALIVE, 10)) ? parseInt(process.env.REDIS_KEEP_ALIVE, 10) : undefined - } + }, + pingInterval: + process.env.REDIS_KEEP_ALIVE && !isNaN(parseInt(process.env.REDIS_KEEP_ALIVE, 10)) + ? parseInt(process.env.REDIS_KEEP_ALIVE, 10) + : undefined }) } else { this.redisSubscriber = createClient({ @@ -32,10 +37,43 @@ export class RedisEventSubscriber { process.env.REDIS_KEEP_ALIVE && !isNaN(parseInt(process.env.REDIS_KEEP_ALIVE, 10)) ? parseInt(process.env.REDIS_KEEP_ALIVE, 10) : undefined - } + }, + pingInterval: + process.env.REDIS_KEEP_ALIVE && !isNaN(parseInt(process.env.REDIS_KEEP_ALIVE, 10)) + ? 
parseInt(process.env.REDIS_KEEP_ALIVE, 10) + : undefined }) } this.sseStreamer = sseStreamer + + this.setupEventListeners() + } + + private setupEventListeners() { + this.redisSubscriber.on('connect', () => { + logger.info(`[RedisEventSubscriber] Redis client connecting...`) + }) + + this.redisSubscriber.on('ready', () => { + logger.info(`[RedisEventSubscriber] Redis client ready and connected`) + }) + + this.redisSubscriber.on('error', (err) => { + logger.error(`[RedisEventSubscriber] Redis client error:`, { + error: err, + isReady: this.redisSubscriber.isReady, + isOpen: this.redisSubscriber.isOpen, + subscribedChannelsCount: this.subscribedChannels.size + }) + }) + + this.redisSubscriber.on('end', () => { + logger.warn(`[RedisEventSubscriber] Redis client connection ended`) + }) + + this.redisSubscriber.on('reconnecting', () => { + logger.info(`[RedisEventSubscriber] Redis client reconnecting...`) + }) } async connect() { @@ -64,7 +102,7 @@ export class RedisEventSubscriber { private handleEvent(message: string) { // Parse the message from Redis const event = JSON.parse(message) - const { eventType, chatId, data } = event + const { eventType, chatId, chatMessageId, data } = event // Stream the event to the client switch (eventType) { @@ -83,6 +121,9 @@ export class RedisEventSubscriber { case 'usedTools': this.sseStreamer.streamUsedToolsEvent(chatId, data) break + case 'calledTools': + this.sseStreamer.streamCalledToolsEvent(chatId, data) + break case 'fileAnnotations': this.sseStreamer.streamFileAnnotationsEvent(chatId, data) break @@ -116,6 +157,21 @@ export class RedisEventSubscriber { case 'metadata': this.sseStreamer.streamMetadataEvent(chatId, data) break + case 'usageMetadata': + this.sseStreamer.streamUsageMetadataEvent(chatId, data) + break + case 'tts_start': + this.sseStreamer.streamTTSStartEvent(chatId, chatMessageId, data.format) + break + case 'tts_data': + this.sseStreamer.streamTTSDataEvent(chatId, chatMessageId, data) + break + case 'tts_end': + 
this.sseStreamer.streamTTSEndEvent(chatId, chatMessageId) + break + case 'tts_abort': + this.sseStreamer.streamTTSAbortEvent(chatId, chatMessageId) + break } } diff --git a/packages/server/src/queue/UpsertQueue.ts b/packages/server/src/queue/UpsertQueue.ts index 451c413e7..de1a0e7db 100644 --- a/packages/server/src/queue/UpsertQueue.ts +++ b/packages/server/src/queue/UpsertQueue.ts @@ -14,11 +14,13 @@ import { executeUpsert } from '../utils/upsertVector' import { executeDocStoreUpsert, insertIntoVectorStore, previewChunks, processLoader } from '../services/documentstore' import { RedisOptions } from 'bullmq' import logger from '../utils/logger' +import { UsageCacheManager } from '../UsageCacheManager' interface UpsertQueueOptions { appDataSource: DataSource telemetry: Telemetry cachePool: CachePool + usageCacheManager: UsageCacheManager componentNodes: IComponentNodes } @@ -27,6 +29,7 @@ export class UpsertQueue extends BaseQueue { private telemetry: Telemetry private cachePool: CachePool private appDataSource: DataSource + private usageCacheManager: UsageCacheManager private queueName: string constructor(name: string, connection: RedisOptions, options: UpsertQueueOptions) { @@ -36,6 +39,7 @@ export class UpsertQueue extends BaseQueue { this.telemetry = options.telemetry this.cachePool = options.cachePool this.appDataSource = options.appDataSource + this.usageCacheManager = options.usageCacheManager } public getQueueName() { @@ -52,6 +56,7 @@ export class UpsertQueue extends BaseQueue { if (this.appDataSource) data.appDataSource = this.appDataSource if (this.telemetry) data.telemetry = this.telemetry if (this.cachePool) data.cachePool = this.cachePool + if (this.usageCacheManager) data.usageCacheManager = this.usageCacheManager if (this.componentNodes) data.componentNodes = this.componentNodes // document-store/loader/preview diff --git a/packages/server/src/routes/apikey/index.ts b/packages/server/src/routes/apikey/index.ts index dbc043dd5..ec9f1a2c9 100644 --- 
a/packages/server/src/routes/apikey/index.ts +++ b/packages/server/src/routes/apikey/index.ts @@ -1,18 +1,19 @@ import express from 'express' import apikeyController from '../../controllers/apikey' +import { checkAnyPermission, checkPermission } from '../../enterprise/rbac/PermissionCheck' const router = express.Router() // CREATE -router.post('/', apikeyController.createApiKey) -router.post('/import', apikeyController.importKeys) +router.post('/', checkPermission('apikeys:create'), apikeyController.createApiKey) +router.post('/import', checkPermission('apikeys:import'), apikeyController.importKeys) // READ -router.get('/', apikeyController.getAllApiKeys) +router.get('/', checkPermission('apikeys:view'), apikeyController.getAllApiKeys) // UPDATE -router.put(['/', '/:id'], apikeyController.updateApiKey) +router.put(['/', '/:id'], checkAnyPermission('apikeys:create,apikeys:update'), apikeyController.updateApiKey) // DELETE -router.delete(['/', '/:id'], apikeyController.deleteApiKey) +router.delete(['/', '/:id'], checkPermission('apikeys:delete'), apikeyController.deleteApiKey) export default router diff --git a/packages/server/src/routes/assistants/index.ts b/packages/server/src/routes/assistants/index.ts index f7754bcac..5599e8954 100644 --- a/packages/server/src/routes/assistants/index.ts +++ b/packages/server/src/routes/assistants/index.ts @@ -1,20 +1,21 @@ import express from 'express' import assistantsController from '../../controllers/assistants' +import { checkPermission, checkAnyPermission } from '../../enterprise/rbac/PermissionCheck' const router = express.Router() // CREATE -router.post('/', assistantsController.createAssistant) +router.post('/', checkPermission('assistants:create'), assistantsController.createAssistant) // READ -router.get('/', assistantsController.getAllAssistants) -router.get(['/', '/:id'], assistantsController.getAssistantById) +router.get('/', checkPermission('assistants:view'), assistantsController.getAllAssistants) +router.get(['/', 
'/:id'], checkPermission('assistants:view'), assistantsController.getAssistantById) // UPDATE -router.put(['/', '/:id'], assistantsController.updateAssistant) +router.put(['/', '/:id'], checkAnyPermission('assistants:create,assistants:update'), assistantsController.updateAssistant) // DELETE -router.delete(['/', '/:id'], assistantsController.deleteAssistant) +router.delete(['/', '/:id'], checkPermission('assistants:delete'), assistantsController.deleteAssistant) router.get('/components/chatmodels', assistantsController.getChatModels) router.get('/components/docstores', assistantsController.getDocumentStores) diff --git a/packages/server/src/routes/chat-messages/index.ts b/packages/server/src/routes/chat-messages/index.ts index ca90abcf7..efea8da9c 100644 --- a/packages/server/src/routes/chat-messages/index.ts +++ b/packages/server/src/routes/chat-messages/index.ts @@ -3,7 +3,8 @@ import chatMessageController from '../../controllers/chat-messages' const router = express.Router() // CREATE -router.post(['/', '/:id'], chatMessageController.createChatMessage) +// NOTE: Unused route +// router.post(['/', '/:id'], chatMessageController.createChatMessage) // READ router.get(['/', '/:id'], chatMessageController.getAllChatMessages) diff --git a/packages/server/src/routes/chatflows/index.ts b/packages/server/src/routes/chatflows/index.ts index b0c535067..5d2ec2609 100644 --- a/packages/server/src/routes/chatflows/index.ts +++ b/packages/server/src/routes/chatflows/index.ts @@ -1,20 +1,43 @@ import express from 'express' import chatflowsController from '../../controllers/chatflows' +import { checkAnyPermission } from '../../enterprise/rbac/PermissionCheck' const router = express.Router() // CREATE -router.post('/', chatflowsController.saveChatflow) -router.post('/importchatflows', chatflowsController.importChatflows) +router.post( + '/', + checkAnyPermission('chatflows:create,chatflows:update,agentflows:create,agentflows:update'), + chatflowsController.saveChatflow +) // READ 
-router.get('/', chatflowsController.getAllChatflows) -router.get(['/', '/:id'], chatflowsController.getChatflowById) +router.get( + '/', + checkAnyPermission('chatflows:view,chatflows:update,agentflows:view,agentflows:update'), + chatflowsController.getAllChatflows +) +router.get( + ['/', '/:id'], + checkAnyPermission('chatflows:view,chatflows:update,chatflows:delete,agentflows:view,agentflows:update,agentflows:delete'), + chatflowsController.getChatflowById +) router.get(['/apikey/', '/apikey/:apikey'], chatflowsController.getChatflowByApiKey) // UPDATE -router.put(['/', '/:id'], chatflowsController.updateChatflow) +router.put( + ['/', '/:id'], + checkAnyPermission('chatflows:create,chatflows:update,agentflows:create,agentflows:update'), + chatflowsController.updateChatflow +) // DELETE -router.delete(['/', '/:id'], chatflowsController.deleteChatflow) +router.delete(['/', '/:id'], checkAnyPermission('chatflows:delete,agentflows:delete'), chatflowsController.deleteChatflow) + +// CHECK FOR CHANGE +router.get( + '/has-changed/:id/:lastUpdatedDateTime', + checkAnyPermission('chatflows:update,agentflows:update'), + chatflowsController.checkIfChatflowHasChanged +) export default router diff --git a/packages/server/src/routes/credentials/index.ts b/packages/server/src/routes/credentials/index.ts index 9f118b49d..6e97547db 100644 --- a/packages/server/src/routes/credentials/index.ts +++ b/packages/server/src/routes/credentials/index.ts @@ -1,18 +1,19 @@ import express from 'express' import credentialsController from '../../controllers/credentials' +import { checkPermission, checkAnyPermission } from '../../enterprise/rbac/PermissionCheck' const router = express.Router() // CREATE -router.post('/', credentialsController.createCredential) +router.post('/', checkPermission('credentials:create'), credentialsController.createCredential) // READ -router.get('/', credentialsController.getAllCredentials) -router.get(['/', '/:id'], credentialsController.getCredentialById) 
+router.get('/', checkPermission('credentials:view'), credentialsController.getAllCredentials) +router.get(['/', '/:id'], checkPermission('credentials:view'), credentialsController.getCredentialById) // UPDATE -router.put(['/', '/:id'], credentialsController.updateCredential) +router.put(['/', '/:id'], checkAnyPermission('credentials:create,credentials:update'), credentialsController.updateCredential) // DELETE -router.delete(['/', '/:id'], credentialsController.deleteCredentials) +router.delete(['/', '/:id'], checkPermission('credentials:delete'), credentialsController.deleteCredentials) export default router diff --git a/packages/server/src/routes/dataset/index.ts b/packages/server/src/routes/dataset/index.ts new file mode 100644 index 000000000..7b73e546a --- /dev/null +++ b/packages/server/src/routes/dataset/index.ts @@ -0,0 +1,29 @@ +import express from 'express' +import datasetController from '../../controllers/dataset' +import { checkAnyPermission, checkPermission } from '../../enterprise/rbac/PermissionCheck' +const router = express.Router() + +// get all datasets +router.get('/', checkPermission('datasets:view'), datasetController.getAllDatasets) +// get new dataset +router.get(['/set', '/set/:id'], checkPermission('datasets:view'), datasetController.getDataset) +// Create new dataset +router.post(['/set', '/set/:id'], checkPermission('datasets:create'), datasetController.createDataset) +// Update dataset +router.put(['/set', '/set/:id'], checkAnyPermission('datasets:create,datasets:update'), datasetController.updateDataset) +// Delete dataset via id +router.delete(['/set', '/set/:id'], checkPermission('datasets:delete'), datasetController.deleteDataset) + +// Create new row in a given dataset +router.post(['/rows', '/rows/:id'], checkPermission('datasets:create'), datasetController.addDatasetRow) +// Update row for a dataset +router.put(['/rows', '/rows/:id'], checkAnyPermission('datasets:create,datasets:update'), datasetController.updateDatasetRow) +// 
Delete dataset row via id +router.delete(['/rows', '/rows/:id'], checkPermission('datasets:delete'), datasetController.deleteDatasetRow) +// PATCH delete by ids +router.patch('/rows', checkPermission('datasets:delete'), datasetController.patchDeleteRows) + +// Update row for a dataset +router.post(['/reorder', '/reorder'], checkAnyPermission('datasets:create,datasets:update'), datasetController.reorderDatasetRow) + +export default router diff --git a/packages/server/src/routes/documentstore/index.ts b/packages/server/src/routes/documentstore/index.ts index 6fb911941..41ce33bb8 100644 --- a/packages/server/src/routes/documentstore/index.ts +++ b/packages/server/src/routes/documentstore/index.ts @@ -1,4 +1,5 @@ import express from 'express' +import { checkPermission, checkAnyPermission } from '../../enterprise/rbac/PermissionCheck' import documentStoreController from '../../controllers/documentstore' import { getMulterStorage } from '../../utils' @@ -10,56 +11,72 @@ router.post(['/refresh/', '/refresh/:id'], documentStoreController.refreshDocSto /** Document Store Routes */ // Create document store -router.post('/store', documentStoreController.createDocumentStore) +router.post('/store', checkPermission('documentStores:create'), documentStoreController.createDocumentStore) // List all stores -router.get('/store', documentStoreController.getAllDocumentStores) +router.get('/store', checkPermission('documentStores:view'), documentStoreController.getAllDocumentStores) // Get specific store -router.get('/store/:id', documentStoreController.getDocumentStoreById) +router.get( + '/store/:id', + checkAnyPermission('documentStores:view,documentStores:update,documentStores:delete'), + documentStoreController.getDocumentStoreById +) // Update documentStore -router.put('/store/:id', documentStoreController.updateDocumentStore) +router.put('/store/:id', checkAnyPermission('documentStores:create,documentStores:update'), documentStoreController.updateDocumentStore) // Delete 
documentStore -router.delete('/store/:id', documentStoreController.deleteDocumentStore) +router.delete('/store/:id', checkPermission('documentStores:delete'), documentStoreController.deleteDocumentStore) // Get document store configs -router.get('/store-configs/:id/:loaderId', documentStoreController.getDocStoreConfigs) +router.get('/store-configs/:id/:loaderId', checkAnyPermission('documentStores:view'), documentStoreController.getDocStoreConfigs) /** Component Nodes = Document Store - Loaders */ // Get all loaders -router.get('/components/loaders', documentStoreController.getDocumentLoaders) +router.get('/components/loaders', checkPermission('documentStores:add-loader'), documentStoreController.getDocumentLoaders) // delete loader from document store -router.delete('/loader/:id/:loaderId', documentStoreController.deleteLoaderFromDocumentStore) +router.delete( + '/loader/:id/:loaderId', + checkPermission('documentStores:delete-loader'), + documentStoreController.deleteLoaderFromDocumentStore +) // chunking preview -router.post('/loader/preview', documentStoreController.previewFileChunks) +router.post('/loader/preview', checkPermission('documentStores:preview-process'), documentStoreController.previewFileChunks) // saving process -router.post('/loader/save', documentStoreController.saveProcessingLoader) +router.post('/loader/save', checkPermission('documentStores:preview-process'), documentStoreController.saveProcessingLoader) // chunking process -router.post('/loader/process/:loaderId', documentStoreController.processLoader) +router.post('/loader/process/:loaderId', checkPermission('documentStores:preview-process'), documentStoreController.processLoader) /** Document Store - Loaders - Chunks */ // delete specific file chunk from the store -router.delete('/chunks/:storeId/:loaderId/:chunkId', documentStoreController.deleteDocumentStoreFileChunk) +router.delete( + '/chunks/:storeId/:loaderId/:chunkId', + 
checkAnyPermission('documentStores:update,documentStores:delete'), + documentStoreController.deleteDocumentStoreFileChunk +) // edit specific file chunk from the store -router.put('/chunks/:storeId/:loaderId/:chunkId', documentStoreController.editDocumentStoreFileChunk) +router.put( + '/chunks/:storeId/:loaderId/:chunkId', + checkPermission('documentStores:update'), + documentStoreController.editDocumentStoreFileChunk +) // Get all file chunks from the store -router.get('/chunks/:storeId/:fileId/:pageNo', documentStoreController.getDocumentStoreFileChunks) +router.get('/chunks/:storeId/:fileId/:pageNo', checkPermission('documentStores:view'), documentStoreController.getDocumentStoreFileChunks) // add chunks to the selected vector store -router.post('/vectorstore/insert', documentStoreController.insertIntoVectorStore) +router.post('/vectorstore/insert', checkPermission('documentStores:upsert-config'), documentStoreController.insertIntoVectorStore) // save the selected vector store -router.post('/vectorstore/save', documentStoreController.saveVectorStoreConfig) +router.post('/vectorstore/save', checkPermission('documentStores:upsert-config'), documentStoreController.saveVectorStoreConfig) // delete data from the selected vector store -router.delete('/vectorstore/:storeId', documentStoreController.deleteVectorStoreFromStore) +router.delete('/vectorstore/:storeId', checkPermission('documentStores:upsert-config'), documentStoreController.deleteVectorStoreFromStore) // query the vector store -router.post('/vectorstore/query', documentStoreController.queryVectorStore) +router.post('/vectorstore/query', checkPermission('documentStores:view'), documentStoreController.queryVectorStore) // Get all embedding providers -router.get('/components/embeddings', documentStoreController.getEmbeddingProviders) +router.get('/components/embeddings', checkPermission('documentStores:upsert-config'), documentStoreController.getEmbeddingProviders) // Get all vector store providers 
-router.get('/components/vectorstore', documentStoreController.getVectorStoreProviders) +router.get('/components/vectorstore', checkPermission('documentStores:upsert-config'), documentStoreController.getVectorStoreProviders) // Get all Record Manager providers -router.get('/components/recordmanager', documentStoreController.getRecordManagerProviders) +router.get('/components/recordmanager', checkPermission('documentStores:upsert-config'), documentStoreController.getRecordManagerProviders) // update the selected vector store from the playground -router.post('/vectorstore/update', documentStoreController.updateVectorStoreConfigOnly) +router.post('/vectorstore/update', checkPermission('documentStores:upsert-config'), documentStoreController.updateVectorStoreConfigOnly) // generate docstore tool description router.post('/generate-tool-desc/:id', documentStoreController.generateDocStoreToolDesc) diff --git a/packages/server/src/routes/evaluations/index.ts b/packages/server/src/routes/evaluations/index.ts new file mode 100644 index 000000000..14bb7c7fa --- /dev/null +++ b/packages/server/src/routes/evaluations/index.ts @@ -0,0 +1,14 @@ +import express from 'express' +import evaluationsController from '../../controllers/evaluations' +import { checkPermission, checkAnyPermission } from '../../enterprise/rbac/PermissionCheck' +const router = express.Router() + +router.get('/', checkPermission('evaluations:view'), evaluationsController.getAllEvaluations) +router.get('/:id', checkPermission('evaluations:view'), evaluationsController.getEvaluation) +router.delete('/:id', checkPermission('evaluations:delete'), evaluationsController.deleteEvaluation) +router.post('/', checkPermission('evaluations:create'), evaluationsController.createEvaluation) +router.get('/is-outdated/:id', evaluationsController.isOutdated) +router.post('/run-again/:id', checkAnyPermission('evaluations:create,evaluations:run'), evaluationsController.runAgain) +router.get('/versions/:id', 
checkPermission('evaluations:view'), evaluationsController.getVersions) +router.patch('/', checkPermission('evaluations:delete'), evaluationsController.patchDeleteEvaluations) +export default router diff --git a/packages/server/src/routes/evaluator/index.ts b/packages/server/src/routes/evaluator/index.ts new file mode 100644 index 000000000..481cbaf8e --- /dev/null +++ b/packages/server/src/routes/evaluator/index.ts @@ -0,0 +1,17 @@ +import express from 'express' +import evaluatorsController from '../../controllers/evaluators' +import { checkPermission, checkAnyPermission } from '../../enterprise/rbac/PermissionCheck' +const router = express.Router() + +// get all datasets +router.get('/', checkPermission('evaluators:view'), evaluatorsController.getAllEvaluators) +// get new dataset +router.get(['/', '/:id'], checkPermission('evaluators:view'), evaluatorsController.getEvaluator) +// Create new dataset +router.post(['/', '/:id'], checkPermission('evaluators:create'), evaluatorsController.createEvaluator) +// Update dataset +router.put(['/', '/:id'], checkAnyPermission('evaluators:create,evaluators:update'), evaluatorsController.updateEvaluator) +// Delete dataset via id +router.delete(['/', '/:id'], checkPermission('evaluators:delete'), evaluatorsController.deleteEvaluator) + +export default router diff --git a/packages/server/src/routes/executions/index.ts b/packages/server/src/routes/executions/index.ts index 589bbf13a..6106613a1 100644 --- a/packages/server/src/routes/executions/index.ts +++ b/packages/server/src/routes/executions/index.ts @@ -1,16 +1,17 @@ import express from 'express' import executionController from '../../controllers/executions' +import { checkAnyPermission } from '../../enterprise/rbac/PermissionCheck' const router = express.Router() // READ -router.get('/', executionController.getAllExecutions) -router.get(['/', '/:id'], executionController.getExecutionById) +router.get('/', checkAnyPermission('executions:view'), 
executionController.getAllExecutions) +router.get(['/', '/:id'], checkAnyPermission('executions:view'), executionController.getExecutionById) // PUT router.put(['/', '/:id'], executionController.updateExecution) // DELETE - single execution or multiple executions -router.delete('/:id', executionController.deleteExecutions) -router.delete('/', executionController.deleteExecutions) +router.delete('/:id', checkAnyPermission('executions:delete'), executionController.deleteExecutions) +router.delete('/', checkAnyPermission('executions:delete'), executionController.deleteExecutions) export default router diff --git a/packages/server/src/routes/export-import/index.ts b/packages/server/src/routes/export-import/index.ts index 40c3930d2..17b28a7c3 100644 --- a/packages/server/src/routes/export-import/index.ts +++ b/packages/server/src/routes/export-import/index.ts @@ -1,9 +1,10 @@ import express from 'express' import exportImportController from '../../controllers/export-import' +import { checkPermission } from '../../enterprise/rbac/PermissionCheck' const router = express.Router() -router.post('/export', exportImportController.exportData) +router.post('/export', checkPermission('workspace:export'), exportImportController.exportData) -router.post('/import', exportImportController.importData) +router.post('/import', checkPermission('workspace:import'), exportImportController.importData) export default router diff --git a/packages/server/src/routes/files/index.ts b/packages/server/src/routes/files/index.ts new file mode 100644 index 000000000..3a48183d4 --- /dev/null +++ b/packages/server/src/routes/files/index.ts @@ -0,0 +1,11 @@ +import express from 'express' +import filesController from '../../controllers/files' +const router = express.Router() + +// READ +router.get('/', filesController.getAllFiles) + +// DELETE +router.delete('/', filesController.deleteFile) + +export default router diff --git a/packages/server/src/routes/index.ts b/packages/server/src/routes/index.ts 
index 42a5d2312..4c4930f44 100644 --- a/packages/server/src/routes/index.ts +++ b/packages/server/src/routes/index.ts @@ -9,10 +9,14 @@ import chatflowsUploadsRouter from './chatflows-uploads' import componentsCredentialsRouter from './components-credentials' import componentsCredentialsIconRouter from './components-credentials-icon' import credentialsRouter from './credentials' +import datasetRouter from './dataset' import documentStoreRouter from './documentstore' +import evaluationsRouter from './evaluations' +import evaluatorsRouter from './evaluator' import exportImportRouter from './export-import' import feedbackRouter from './feedback' import fetchLinksRouter from './fetch-links' +import filesRouter from './files' import flowConfigRouter from './flow-config' import getUploadFileRouter from './get-upload-file' import getUploadPathRouter from './get-upload-path' @@ -20,12 +24,14 @@ import internalChatmessagesRouter from './internal-chat-messages' import internalPredictionRouter from './internal-predictions' import leadsRouter from './leads' import loadPromptRouter from './load-prompts' +import logsRouter from './log' import marketplacesRouter from './marketplaces' import nodeConfigRouter from './node-configs' import nodeCustomFunctionRouter from './node-custom-functions' import nodeIconRouter from './node-icons' import nodeLoadMethodRouter from './node-load-methods' import nodesRouter from './nodes' +import oauth2Router from './oauth2' import openaiAssistantsRouter from './openai-assistants' import openaiAssistantsFileRouter from './openai-assistants-files' import openaiAssistantsVectorStoreRouter from './openai-assistants-vector-store' @@ -36,6 +42,7 @@ import promptListsRouter from './prompts-lists' import publicChatbotRouter from './public-chatbots' import publicChatflowsRouter from './public-chatflows' import publicExecutionsRouter from './public-executions' +import settingsRouter from './settings' import statsRouter from './stats' import toolsRouter from 
'./tools' import upsertHistoryRouter from './upsert-history' @@ -43,10 +50,24 @@ import variablesRouter from './variables' import vectorRouter from './vectors' import verifyRouter from './verify' import versionRouter from './versions' +import pricingRouter from './pricing' import nvidiaNimRouter from './nvidia-nim' import executionsRouter from './executions' import validationRouter from './validation' import agentflowv2GeneratorRouter from './agentflowv2-generator' +import textToSpeechRouter from './text-to-speech' + +import authRouter from '../enterprise/routes/auth' +import auditRouter from '../enterprise/routes/audit' +import userRouter from '../enterprise/routes/user.route' +import organizationRouter from '../enterprise/routes/organization.route' +import roleRouter from '../enterprise/routes/role.route' +import organizationUserRoute from '../enterprise/routes/organization-user.route' +import workspaceRouter from '../enterprise/routes/workspace.route' +import workspaceUserRouter from '../enterprise/routes/workspace-user.route' +import accountRouter from '../enterprise/routes/account.route' +import loginMethodRouter from '../enterprise/routes/login-method.route' +import { IdentityManager } from '../IdentityManager' const router = express.Router() @@ -57,11 +78,14 @@ router.use('/attachments', attachmentsRouter) router.use('/chatflows', chatflowsRouter) router.use('/chatflows-streaming', chatflowsStreamingRouter) router.use('/chatmessage', chatMessageRouter) +router.use('/chatflows-uploads', chatflowsUploadsRouter) router.use('/components-credentials', componentsCredentialsRouter) router.use('/components-credentials-icon', componentsCredentialsIconRouter) -router.use('/chatflows-uploads', chatflowsUploadsRouter) router.use('/credentials', credentialsRouter) +router.use('/datasets', IdentityManager.checkFeatureByPlan('feat:datasets'), datasetRouter) router.use('/document-store', documentStoreRouter) +router.use('/evaluations', 
IdentityManager.checkFeatureByPlan('feat:evaluations'), evaluationsRouter) +router.use('/evaluators', IdentityManager.checkFeatureByPlan('feat:evaluators'), evaluatorsRouter) router.use('/export-import', exportImportRouter) router.use('/feedback', feedbackRouter) router.use('/fetch-links', fetchLinksRouter) @@ -78,6 +102,7 @@ router.use('/node-custom-function', nodeCustomFunctionRouter) router.use('/node-icon', nodeIconRouter) router.use('/node-load-method', nodeLoadMethodRouter) router.use('/nodes', nodesRouter) +router.use('/oauth2-credential', oauth2Router) router.use('/openai-assistants', openaiAssistantsRouter) router.use('/openai-assistants-file', openaiAssistantsFileRouter) router.use('/openai-assistants-vector-store', openaiAssistantsVectorStoreRouter) @@ -94,9 +119,25 @@ router.use('/vector', vectorRouter) router.use('/verify', verifyRouter) router.use('/version', versionRouter) router.use('/upsert-history', upsertHistoryRouter) +router.use('/settings', settingsRouter) +router.use('/pricing', pricingRouter) router.use('/nvidia-nim', nvidiaNimRouter) router.use('/executions', executionsRouter) router.use('/validation', validationRouter) router.use('/agentflowv2-generator', agentflowv2GeneratorRouter) +router.use('/text-to-speech', textToSpeechRouter) + +router.use('/auth', authRouter) +router.use('/audit', IdentityManager.checkFeatureByPlan('feat:login-activity'), auditRouter) +router.use('/user', userRouter) +router.use('/organization', organizationRouter) +router.use('/role', IdentityManager.checkFeatureByPlan('feat:roles'), roleRouter) +router.use('/organizationuser', organizationUserRoute) +router.use('/workspace', workspaceRouter) +router.use('/workspaceuser', workspaceUserRouter) +router.use('/account', accountRouter) +router.use('/loginmethod', loginMethodRouter) +router.use('/logs', IdentityManager.checkFeatureByPlan('feat:logs'), logsRouter) +router.use('/files', IdentityManager.checkFeatureByPlan('feat:files'), filesRouter) export default router 
diff --git a/packages/server/src/routes/log/index.ts b/packages/server/src/routes/log/index.ts new file mode 100644 index 000000000..290004f5a --- /dev/null +++ b/packages/server/src/routes/log/index.ts @@ -0,0 +1,9 @@ +import express from 'express' +import logController from '../../controllers/log' +import { checkAnyPermission } from '../../enterprise/rbac/PermissionCheck' +const router = express.Router() + +// READ +router.get('/', checkAnyPermission('logs:view'), logController.getLogs) + +export default router diff --git a/packages/server/src/routes/marketplaces/index.ts b/packages/server/src/routes/marketplaces/index.ts index d97f96f38..050140358 100644 --- a/packages/server/src/routes/marketplaces/index.ts +++ b/packages/server/src/routes/marketplaces/index.ts @@ -1,16 +1,17 @@ import express from 'express' import marketplacesController from '../../controllers/marketplaces' +import { checkPermission, checkAnyPermission } from '../../enterprise/rbac/PermissionCheck' const router = express.Router() // READ -router.get('/templates', marketplacesController.getAllTemplates) +router.get('/templates', checkPermission('templates:marketplace'), marketplacesController.getAllTemplates) -router.post('/custom', marketplacesController.saveCustomTemplate) +router.post('/custom', checkAnyPermission('templates:flowexport,templates:toolexport'), marketplacesController.saveCustomTemplate) // READ -router.get('/custom', marketplacesController.getAllCustomTemplates) +router.get('/custom', checkPermission('templates:custom'), marketplacesController.getAllCustomTemplates) // DELETE -router.delete(['/', '/custom/:id'], marketplacesController.deleteCustomTemplate) +router.delete(['/', '/custom/:id'], checkPermission('templates:custom-delete'), marketplacesController.deleteCustomTemplate) export default router diff --git a/packages/server/src/routes/oauth2/index.ts b/packages/server/src/routes/oauth2/index.ts new file mode 100644 index 000000000..b5c5f571b --- /dev/null +++ 
b/packages/server/src/routes/oauth2/index.ts @@ -0,0 +1,422 @@ +/** + * OAuth2 Authorization Code Flow Implementation + * + * This module implements a complete OAuth2 authorization code flow for Flowise credentials. + * It supports Microsoft Graph and other OAuth2 providers. + * + * CREDENTIAL DATA STRUCTURE: + * The credential's encryptedData should contain a JSON object with the following fields: + * + * Required fields: + * - client_id: OAuth2 application client ID + * - client_secret: OAuth2 application client secret + * + * Optional fields (provider-specific): + * - tenant_id: Microsoft Graph tenant ID (if using Microsoft Graph) + * - authorization_endpoint: Custom authorization URL (defaults to Microsoft Graph if tenant_id provided) + * - token_endpoint: Custom token URL (defaults to Microsoft Graph if tenant_id provided) + * - redirect_uri: Custom redirect URI (defaults to this callback endpoint) + * - scope: OAuth2 scopes to request (e.g., "user.read mail.read") + * - response_type: OAuth2 response type (defaults to "code") + * - response_mode: OAuth2 response mode (defaults to "query") + * + * ENDPOINTS: + * + * 1. POST /api/v1/oauth2/authorize/:credentialId + * - Generates authorization URL for initiating OAuth2 flow + * - Uses credential ID as state parameter for security + * - Returns authorization URL to redirect user to + * + * 2. GET /api/v1/oauth2/callback + * - Handles OAuth2 callback with authorization code + * - Exchanges code for access token + * - Updates credential with token data + * - Supports Microsoft Graph and custom OAuth2 providers + * + * 3. POST /api/v1/oauth2/refresh/:credentialId + * - Refreshes expired access tokens using refresh token + * - Updates credential with new token data + * + * USAGE FLOW: + * 1. Create a credential with OAuth2 configuration (client_id, client_secret, etc.) + * 2. Call POST /oauth2/authorize/:credentialId to get authorization URL + * 3. Redirect user to authorization URL + * 4. 
User authorizes and gets redirected to callback endpoint + * 5. Callback endpoint exchanges code for tokens and saves them + * 6. Use POST /oauth2/refresh/:credentialId when tokens expire + * + * TOKEN STORAGE: + * After successful authorization, the credential will contain additional fields: + * - access_token: OAuth2 access token + * - refresh_token: OAuth2 refresh token (if provided) + * - token_type: Token type (usually "Bearer") + * - expires_in: Token lifetime in seconds + * - expires_at: Token expiry timestamp (ISO string) + * - granted_scope: Actual scopes granted by provider + * - token_received_at: When token was received (ISO string) + */ + +import express from 'express' +import axios from 'axios' +import { Request, Response, NextFunction } from 'express' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' +import { Credential } from '../../database/entities/Credential' +import { decryptCredentialData, encryptCredentialData } from '../../utils' +import { InternalFlowiseError } from '../../errors/internalFlowiseError' +import { StatusCodes } from 'http-status-codes' +import { generateSuccessPage, generateErrorPage } from './templates' + +const router = express.Router() + +// Initiate OAuth2 authorization flow +router.post('/authorize/:credentialId', async (req: Request, res: Response, next: NextFunction) => { + try { + const { credentialId } = req.params + + const appServer = getRunningExpressApp() + const credentialRepository = appServer.AppDataSource.getRepository(Credential) + + // Find credential by ID + const credential = await credentialRepository.findOneBy({ + id: credentialId + }) + + if (!credential) { + return res.status(404).json({ + success: false, + message: 'Credential not found' + }) + } + + // Decrypt the credential data to get OAuth configuration + const decryptedData = await decryptCredentialData(credential.encryptedData) + + const { + clientId, + authorizationUrl, + redirect_uri, + scope, + response_type = 'code', + 
response_mode = 'query', + additionalParameters = '' + } = decryptedData + + if (!clientId) { + return res.status(400).json({ + success: false, + message: 'Missing clientId in credential data' + }) + } + + if (!authorizationUrl) { + return res.status(400).json({ + success: false, + message: 'No authorizationUrl specified in credential data' + }) + } + + const defaultRedirectUri = `${req.protocol}://${req.get('host')}/api/v1/oauth2-credential/callback` + const finalRedirectUri = redirect_uri || defaultRedirectUri + + const authParams = new URLSearchParams({ + client_id: clientId, + response_type, + response_mode, + state: credentialId, // Use credential ID as state parameter + redirect_uri: finalRedirectUri + }) + + if (scope) { + authParams.append('scope', scope) + } + + let fullAuthorizationUrl = `${authorizationUrl}?${authParams.toString()}` + + if (additionalParameters) { + fullAuthorizationUrl += `&${additionalParameters.toString()}` + } + + res.json({ + success: true, + message: 'Authorization URL generated successfully', + credentialId, + authorizationUrl: fullAuthorizationUrl, + redirectUri: finalRedirectUri + }) + } catch (error) { + next( + new InternalFlowiseError( + StatusCodes.INTERNAL_SERVER_ERROR, + `OAuth2 authorization error: ${error instanceof Error ? error.message : 'Unknown error'}` + ) + ) + } +}) + +// OAuth2 callback endpoint +router.get('/callback', async (req: Request, res: Response) => { + try { + const { code, state, error, error_description } = req.query + + if (error) { + const errorHtml = generateErrorPage( + error as string, + (error_description as string) || 'An error occurred', + error_description ? 
`Description: ${error_description}` : undefined + ) + + res.setHeader('Content-Type', 'text/html') + return res.status(400).send(errorHtml) + } + + if (!code || !state) { + const errorHtml = generateErrorPage('Missing required parameters', 'Missing code or state', 'Please try again later.') + + res.setHeader('Content-Type', 'text/html') + return res.status(400).send(errorHtml) + } + + const appServer = getRunningExpressApp() + const credentialRepository = appServer.AppDataSource.getRepository(Credential) + + // Find credential by state (assuming state contains the credential ID) + const credential = await credentialRepository.findOneBy({ + id: state as string + }) + + if (!credential) { + const errorHtml = generateErrorPage( + 'Credential not found', + `Credential not found for the provided state: ${state}`, + 'Please try the authorization process again.' + ) + + res.setHeader('Content-Type', 'text/html') + return res.status(404).send(errorHtml) + } + + const decryptedData = await decryptCredentialData(credential.encryptedData) + + const { clientId, clientSecret, accessTokenUrl, redirect_uri, scope } = decryptedData + + if (!clientId || !clientSecret) { + const errorHtml = generateErrorPage( + 'Missing OAuth configuration', + 'Missing clientId or clientSecret', + 'Please check your credential setup.' + ) + + res.setHeader('Content-Type', 'text/html') + return res.status(400).send(errorHtml) + } + + let tokenUrl = accessTokenUrl + if (!tokenUrl) { + const errorHtml = generateErrorPage( + 'Missing token endpoint URL', + 'No Access Token URL specified in credential data', + 'Please check your credential configuration.' 
+ ) + + res.setHeader('Content-Type', 'text/html') + return res.status(400).send(errorHtml) + } + + const defaultRedirectUri = `${req.protocol}://${req.get('host')}/api/v1/oauth2-credential/callback` + const finalRedirectUri = redirect_uri || defaultRedirectUri + + const tokenRequestData: any = { + client_id: clientId, + client_secret: clientSecret, + code: code as string, + grant_type: 'authorization_code', + redirect_uri: finalRedirectUri + } + + if (scope) { + tokenRequestData.scope = scope + } + + const tokenResponse = await axios.post(tokenUrl, new URLSearchParams(tokenRequestData).toString(), { + headers: { + 'Content-Type': 'application/x-www-form-urlencoded', + Accept: 'application/json' + } + }) + + const tokenData = tokenResponse.data + + // Update the credential data with token information + const updatedCredentialData: any = { + ...decryptedData, + ...tokenData, + token_received_at: new Date().toISOString() + } + + // Add refresh token if provided + if (tokenData.refresh_token) { + updatedCredentialData.refresh_token = tokenData.refresh_token + } + + // Calculate token expiry time + if (tokenData.expires_in) { + const expiryTime = new Date(Date.now() + tokenData.expires_in * 1000) + updatedCredentialData.expires_at = expiryTime.toISOString() + } + + // Encrypt the updated credential data + const encryptedData = await encryptCredentialData(updatedCredentialData) + + // Update the credential in the database + await credentialRepository.update(credential.id, { + encryptedData, + updatedDate: new Date() + }) + + // Return HTML that closes the popup window on success + const successHtml = generateSuccessPage(credential.id) + + res.setHeader('Content-Type', 'text/html') + res.send(successHtml) + } catch (error) { + if (axios.isAxiosError(error)) { + const axiosError = error + const errorHtml = generateErrorPage( + axiosError.response?.data?.error || 'token_exchange_failed', + axiosError.response?.data?.error_description || 'Token exchange failed', + 
axiosError.response?.data?.error_description ? `Description: ${axiosError.response?.data?.error_description}` : undefined + ) + + res.setHeader('Content-Type', 'text/html') + return res.status(400).send(errorHtml) + } + + // Generic error HTML page + const errorHtml = generateErrorPage( + 'An unexpected error occurred', + 'Please try again later.', + error instanceof Error ? error.message : 'Unknown error' + ) + + res.setHeader('Content-Type', 'text/html') + res.status(500).send(errorHtml) + } +}) + +// Refresh OAuth2 access token +router.post('/refresh/:credentialId', async (req: Request, res: Response, next: NextFunction) => { + try { + const { credentialId } = req.params + + const appServer = getRunningExpressApp() + const credentialRepository = appServer.AppDataSource.getRepository(Credential) + + const credential = await credentialRepository.findOneBy({ + id: credentialId + }) + + if (!credential) { + return res.status(404).json({ + success: false, + message: 'Credential not found' + }) + } + + const decryptedData = await decryptCredentialData(credential.encryptedData) + + const { clientId, clientSecret, refresh_token, accessTokenUrl, scope } = decryptedData + + if (!clientId || !clientSecret || !refresh_token) { + return res.status(400).json({ + success: false, + message: 'Missing required OAuth configuration: clientId, clientSecret, or refresh_token' + }) + } + + let tokenUrl = accessTokenUrl + if (!tokenUrl) { + return res.status(400).json({ + success: false, + message: 'No Access Token URL specified in credential data' + }) + } + + const refreshRequestData: any = { + client_id: clientId, + client_secret: clientSecret, + grant_type: 'refresh_token', + refresh_token + } + + if (scope) { + refreshRequestData.scope = scope + } + + const tokenResponse = await axios.post(tokenUrl, new URLSearchParams(refreshRequestData).toString(), { + headers: { + 'Content-Type': 'application/x-www-form-urlencoded', + Accept: 'application/json' + } + }) + + // Extract token 
data from response + const tokenData = tokenResponse.data + + // Update the credential data with new token information + const updatedCredentialData: any = { + ...decryptedData, + ...tokenData, + token_received_at: new Date().toISOString() + } + + // Update refresh token if a new one was provided + if (tokenData.refresh_token) { + updatedCredentialData.refresh_token = tokenData.refresh_token + } + + // Calculate token expiry time + if (tokenData.expires_in) { + const expiryTime = new Date(Date.now() + tokenData.expires_in * 1000) + updatedCredentialData.expires_at = expiryTime.toISOString() + } + + // Encrypt the updated credential data + const encryptedData = await encryptCredentialData(updatedCredentialData) + + // Update the credential in the database + await credentialRepository.update(credential.id, { + encryptedData, + updatedDate: new Date() + }) + + // Return success response + res.json({ + success: true, + message: 'OAuth2 token refreshed successfully', + credentialId: credential.id, + tokenInfo: { + ...tokenData, + has_new_refresh_token: !!tokenData.refresh_token, + expires_at: updatedCredentialData.expires_at + } + }) + } catch (error) { + if (axios.isAxiosError(error)) { + const axiosError = error + return res.status(400).json({ + success: false, + message: `Token refresh failed: ${axiosError.response?.data?.error_description || axiosError.message}`, + details: axiosError.response?.data + }) + } + + next( + new InternalFlowiseError( + StatusCodes.INTERNAL_SERVER_ERROR, + `OAuth2 token refresh error: ${error instanceof Error ? 
error.message : 'Unknown error'}` + ) + ) + } +}) + +export default router diff --git a/packages/server/src/routes/oauth2/templates.ts b/packages/server/src/routes/oauth2/templates.ts new file mode 100644 index 000000000..6b3609746 --- /dev/null +++ b/packages/server/src/routes/oauth2/templates.ts @@ -0,0 +1,128 @@ +/** + * HTML Templates for OAuth2 Callback Pages + * + * This module contains reusable HTML templates for OAuth2 authorization responses. + * The templates provide consistent styling and behavior for success and error pages. + */ + +export interface OAuth2PageOptions { + title: string + statusIcon: string + statusText: string + statusColor: string + message: string + details?: string + postMessageType: 'OAUTH2_SUCCESS' | 'OAUTH2_ERROR' + postMessageData: any + autoCloseDelay: number +} + +export const generateOAuth2ResponsePage = (options: OAuth2PageOptions): string => { + const { title, statusIcon, statusText, statusColor, message, details, postMessageType, postMessageData, autoCloseDelay } = options + + return ` + + + + ${title} + + + +
+
${statusIcon} ${statusText}
+
${message}
+ ${details ? `
${details}
` : ''} +
+ + + + ` +} + +export const generateSuccessPage = (credentialId: string): string => { + return generateOAuth2ResponsePage({ + title: 'OAuth2 Authorization Success', + statusIcon: 'โœ“', + statusText: 'Authorization Successful', + statusColor: '#4caf50', + message: 'You can close this window now.', + postMessageType: 'OAUTH2_SUCCESS', + postMessageData: { + credentialId, + success: true, + message: 'OAuth2 authorization completed successfully' + }, + autoCloseDelay: 1000 + }) +} + +export const generateErrorPage = (error: string, message: string, details?: string): string => { + return generateOAuth2ResponsePage({ + title: 'OAuth2 Authorization Error', + statusIcon: 'โœ—', + statusText: 'Authorization Failed', + statusColor: '#f44336', + message, + details, + postMessageType: 'OAUTH2_ERROR', + postMessageData: { + success: false, + message, + error + }, + autoCloseDelay: 3000 + }) +} diff --git a/packages/server/src/routes/predictions/index.ts b/packages/server/src/routes/predictions/index.ts index 40f37eef1..077d10be4 100644 --- a/packages/server/src/routes/predictions/index.ts +++ b/packages/server/src/routes/predictions/index.ts @@ -4,6 +4,8 @@ import { getMulterStorage } from '../../utils' const router = express.Router() +// NOTE: extractChatflowId function in XSS.ts extracts the chatflow ID from the prediction URL. +// It assumes the URL format is /prediction/{chatflowId}. Make sure to update the function if the URL format changes. 
// CREATE router.post( ['/', '/:id'], diff --git a/packages/server/src/routes/pricing/index.ts b/packages/server/src/routes/pricing/index.ts new file mode 100644 index 000000000..ce82a6fda --- /dev/null +++ b/packages/server/src/routes/pricing/index.ts @@ -0,0 +1,8 @@ +import express from 'express' +import pricingController from '../../controllers/pricing' +const router = express.Router() + +// GET +router.get('/', pricingController.getPricing) + +export default router diff --git a/packages/server/src/routes/settings/index.ts b/packages/server/src/routes/settings/index.ts new file mode 100644 index 000000000..e311c76b4 --- /dev/null +++ b/packages/server/src/routes/settings/index.ts @@ -0,0 +1,8 @@ +import express from 'express' +import settingsController from '../../controllers/settings' +const router = express.Router() + +// CREATE +router.get('/', settingsController.getSettingsList) + +export default router diff --git a/packages/server/src/routes/text-to-speech/index.ts b/packages/server/src/routes/text-to-speech/index.ts new file mode 100644 index 000000000..56b892a53 --- /dev/null +++ b/packages/server/src/routes/text-to-speech/index.ts @@ -0,0 +1,12 @@ +import express from 'express' +import textToSpeechController from '../../controllers/text-to-speech' + +const router = express.Router() + +router.post('/generate', textToSpeechController.generateTextToSpeech) + +router.post('/abort', textToSpeechController.abortTextToSpeech) + +router.get('/voices', textToSpeechController.getVoices) + +export default router diff --git a/packages/server/src/routes/tools/index.ts b/packages/server/src/routes/tools/index.ts index e97fb5cf4..81ff90292 100644 --- a/packages/server/src/routes/tools/index.ts +++ b/packages/server/src/routes/tools/index.ts @@ -1,19 +1,20 @@ import express from 'express' import toolsController from '../../controllers/tools' +import { checkAnyPermission, checkPermission } from '../../enterprise/rbac/PermissionCheck' const router = express.Router() // 
CREATE -router.post('/', toolsController.createTool) +router.post('/', checkPermission('tools:create'), toolsController.createTool) // READ -router.get('/', toolsController.getAllTools) -router.get(['/', '/:id'], toolsController.getToolById) +router.get('/', checkPermission('tools:view'), toolsController.getAllTools) +router.get(['/', '/:id'], checkAnyPermission('tools:view'), toolsController.getToolById) // UPDATE -router.put(['/', '/:id'], toolsController.updateTool) +router.put(['/', '/:id'], checkAnyPermission('tools:update,tools:create'), toolsController.updateTool) // DELETE -router.delete(['/', '/:id'], toolsController.deleteTool) +router.delete(['/', '/:id'], checkPermission('tools:delete'), toolsController.deleteTool) export default router diff --git a/packages/server/src/routes/variables/index.ts b/packages/server/src/routes/variables/index.ts index f6d3625a4..20ab6e135 100644 --- a/packages/server/src/routes/variables/index.ts +++ b/packages/server/src/routes/variables/index.ts @@ -1,18 +1,19 @@ import express from 'express' import variablesController from '../../controllers/variables' +import { checkAnyPermission, checkPermission } from '../../enterprise/rbac/PermissionCheck' const router = express.Router() // CREATE -router.post('/', variablesController.createVariable) +router.post('/', checkPermission('variables:create'), variablesController.createVariable) // READ -router.get('/', variablesController.getAllVariables) +router.get('/', checkPermission('variables:view'), variablesController.getAllVariables) // UPDATE -router.put(['/', '/:id'], variablesController.updateVariable) +router.put(['/', '/:id'], checkAnyPermission('variables:create,variables:update'), variablesController.updateVariable) // DELETE -router.delete(['/', '/:id'], variablesController.deleteVariable) +router.delete(['/', '/:id'], checkPermission('variables:delete'), variablesController.deleteVariable) export default router diff --git 
a/packages/server/src/services/agentflowv2-generator/index.ts b/packages/server/src/services/agentflowv2-generator/index.ts index 4d987b90e..c41a20184 100644 --- a/packages/server/src/services/agentflowv2-generator/index.ts +++ b/packages/server/src/services/agentflowv2-generator/index.ts @@ -116,8 +116,9 @@ const getAllAgentflowv2Marketplaces = async () => { } }) + const title = file.split('.json')[0] const template = { - title: file.split('.json')[0], + title, description: fileDataObj.description || `Template from ${file}`, usecases: fileDataObj.usecases || [], nodes: filteredNodes, @@ -126,7 +127,11 @@ const getAllAgentflowv2Marketplaces = async () => { // Validate template against schema const validatedTemplate = AgentFlowV2Type.parse(template) - templates.push(validatedTemplate) + templates.push({ + ...validatedTemplate, + // @ts-ignore + title: title + }) } catch (error) { console.error(`Error processing template file ${file}:`, error) // Continue with next file instead of failing completely diff --git a/packages/server/src/services/apikey/index.ts b/packages/server/src/services/apikey/index.ts index 9ab92edf8..5e009c927 100644 --- a/packages/server/src/services/apikey/index.ts +++ b/packages/server/src/services/apikey/index.ts @@ -1,48 +1,41 @@ import { StatusCodes } from 'http-status-codes' -import { - addAPIKey as addAPIKey_json, - deleteAPIKey as deleteAPIKey_json, - generateAPIKey, - generateSecretHash, - getApiKey as getApiKey_json, - getAPIKeys as getAPIKeys_json, - updateAPIKey as updateAPIKey_json, - replaceAllAPIKeys as replaceAllAPIKeys_json, - importKeys as importKeys_json -} from '../../utils/apiKey' +import { generateAPIKey, generateSecretHash } from '../../utils/apiKey' import { addChatflowsCount } from '../../utils/addChatflowsCount' import { InternalFlowiseError } from '../../errors/internalFlowiseError' import { getErrorMessage } from '../../errors/utils' import { getRunningExpressApp } from '../../utils/getRunningExpressApp' import { ApiKey 
} from '../../database/entities/ApiKey' -import { appConfig } from '../../AppConfig' -import { randomBytes } from 'crypto' import { Not, IsNull } from 'typeorm' +import { getWorkspaceSearchOptions } from '../../enterprise/utils/ControllerServiceUtils' +import { v4 as uuidv4 } from 'uuid' -const _apikeysStoredInJson = (): boolean => { - return appConfig.apiKeys.storageType === 'json' +const getAllApiKeysFromDB = async (workspaceId: string, page: number = -1, limit: number = -1) => { + const appServer = getRunningExpressApp() + const queryBuilder = appServer.AppDataSource.getRepository(ApiKey).createQueryBuilder('api_key').orderBy('api_key.updatedDate', 'DESC') + if (page > 0 && limit > 0) { + queryBuilder.skip((page - 1) * limit) + queryBuilder.take(limit) + } + queryBuilder.andWhere('api_key.workspaceId = :workspaceId', { workspaceId }) + const [data, total] = await queryBuilder.getManyAndCount() + const keysWithChatflows = await addChatflowsCount(data) + + if (page > 0 && limit > 0) { + return { total, data: keysWithChatflows } + } else { + return keysWithChatflows + } } -const _apikeysStoredInDb = (): boolean => { - return appConfig.apiKeys.storageType === 'db' -} - -const getAllApiKeys = async () => { +const getAllApiKeys = async (workspaceId: string, autoCreateNewKey?: boolean, page: number = -1, limit: number = -1) => { try { - if (_apikeysStoredInJson()) { - const keys = await getAPIKeys_json() - return await addChatflowsCount(keys) - } else if (_apikeysStoredInDb()) { - const appServer = getRunningExpressApp() - let keys = await appServer.AppDataSource.getRepository(ApiKey).find() - if (keys.length === 0) { - await createApiKey('DefaultKey') - keys = await appServer.AppDataSource.getRepository(ApiKey).find() - } - return await addChatflowsCount(keys) - } else { - throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `UNKNOWN APIKEY_STORAGE_TYPE`) + let keys = await getAllApiKeysFromDB(workspaceId, page, limit) + const isEmpty = keys?.total === 0 
|| (Array.isArray(keys) && keys?.length === 0) + if (isEmpty && autoCreateNewKey) { + await createApiKey('DefaultKey', workspaceId) + keys = await getAllApiKeysFromDB(workspaceId, page, limit) } + return keys } catch (error) { throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `Error: apikeyService.getAllApiKeys - ${getErrorMessage(error)}`) } @@ -50,90 +43,80 @@ const getAllApiKeys = async () => { const getApiKey = async (apiKey: string) => { try { - if (_apikeysStoredInJson()) { - return getApiKey_json(apiKey) - } else if (_apikeysStoredInDb()) { - const appServer = getRunningExpressApp() - const currentKey = await appServer.AppDataSource.getRepository(ApiKey).findOneBy({ - apiKey: apiKey - }) - if (!currentKey) { - return undefined - } - return currentKey - } else { - throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `UNKNOWN APIKEY_STORAGE_TYPE`) + const appServer = getRunningExpressApp() + const currentKey = await appServer.AppDataSource.getRepository(ApiKey).findOneBy({ + apiKey: apiKey + }) + if (!currentKey) { + return undefined } + return currentKey } catch (error) { - throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `Error: apikeyService.createApiKey - ${getErrorMessage(error)}`) + throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `Error: apikeyService.getApiKey - ${getErrorMessage(error)}`) } } -const createApiKey = async (keyName: string) => { +const getApiKeyById = async (apiKeyId: string) => { try { - if (_apikeysStoredInJson()) { - const keys = await addAPIKey_json(keyName) - return await addChatflowsCount(keys) - } else if (_apikeysStoredInDb()) { - const apiKey = generateAPIKey() - const apiSecret = generateSecretHash(apiKey) - const appServer = getRunningExpressApp() - const newKey = new ApiKey() - newKey.id = randomBytes(16).toString('hex') - newKey.apiKey = apiKey - newKey.apiSecret = apiSecret - newKey.keyName = keyName - const key = 
appServer.AppDataSource.getRepository(ApiKey).create(newKey) - await appServer.AppDataSource.getRepository(ApiKey).save(key) - return getAllApiKeys() - } else { - throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `UNKNOWN APIKEY_STORAGE_TYPE`) + const appServer = getRunningExpressApp() + const currentKey = await appServer.AppDataSource.getRepository(ApiKey).findOneBy({ + id: apiKeyId + }) + if (!currentKey) { + return undefined } + return currentKey + } catch (error) { + throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `Error: apikeyService.getApiKeyById - ${getErrorMessage(error)}`) + } +} + +const createApiKey = async (keyName: string, workspaceId: string) => { + try { + const apiKey = generateAPIKey() + const apiSecret = generateSecretHash(apiKey) + const appServer = getRunningExpressApp() + const newKey = new ApiKey() + newKey.id = uuidv4() + newKey.apiKey = apiKey + newKey.apiSecret = apiSecret + newKey.keyName = keyName + newKey.workspaceId = workspaceId + const key = appServer.AppDataSource.getRepository(ApiKey).create(newKey) + await appServer.AppDataSource.getRepository(ApiKey).save(key) + return await getAllApiKeysFromDB(workspaceId) } catch (error) { throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `Error: apikeyService.createApiKey - ${getErrorMessage(error)}`) } } // Update api key -const updateApiKey = async (id: string, keyName: string) => { +const updateApiKey = async (id: string, keyName: string, workspaceId: string) => { try { - if (_apikeysStoredInJson()) { - const keys = await updateAPIKey_json(id, keyName) - return await addChatflowsCount(keys) - } else if (_apikeysStoredInDb()) { - const appServer = getRunningExpressApp() - const currentKey = await appServer.AppDataSource.getRepository(ApiKey).findOneBy({ - id: id - }) - if (!currentKey) { - throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `ApiKey ${currentKey} not found`) - } - currentKey.keyName = keyName - await 
appServer.AppDataSource.getRepository(ApiKey).save(currentKey) - return getAllApiKeys() - } else { - throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `UNKNOWN APIKEY_STORAGE_TYPE`) + const appServer = getRunningExpressApp() + const currentKey = await appServer.AppDataSource.getRepository(ApiKey).findOneBy({ + id: id, + workspaceId: workspaceId + }) + if (!currentKey) { + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `ApiKey ${currentKey} not found`) } + currentKey.keyName = keyName + await appServer.AppDataSource.getRepository(ApiKey).save(currentKey) + return await getAllApiKeysFromDB(workspaceId) } catch (error) { throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `Error: apikeyService.updateApiKey - ${getErrorMessage(error)}`) } } -const deleteApiKey = async (id: string) => { +const deleteApiKey = async (id: string, workspaceId: string) => { try { - if (_apikeysStoredInJson()) { - const keys = await deleteAPIKey_json(id) - return await addChatflowsCount(keys) - } else if (_apikeysStoredInDb()) { - const appServer = getRunningExpressApp() - const dbResponse = await appServer.AppDataSource.getRepository(ApiKey).delete({ id: id }) - if (!dbResponse) { - throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `ApiKey ${id} not found`) - } - return getAllApiKeys() - } else { - throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `UNKNOWN APIKEY_STORAGE_TYPE`) + const appServer = getRunningExpressApp() + const dbResponse = await appServer.AppDataSource.getRepository(ApiKey).delete({ id, workspaceId }) + if (!dbResponse) { + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `ApiKey ${id} not found`) } + return dbResponse } catch (error) { throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `Error: apikeyService.deleteApiKey - ${getErrorMessage(error)}`) } @@ -142,6 +125,7 @@ const deleteApiKey = async (id: string) => { const importKeys = async (body: any) => { try { const jsonFile = body.jsonFile + 
const workspaceId = body.workspaceId const splitDataURI = jsonFile.split(',') if (splitDataURI[0] !== 'data:application/json;base64') { throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `Invalid dataURI`) @@ -149,70 +133,98 @@ const importKeys = async (body: any) => { const bf = Buffer.from(splitDataURI[1] || '', 'base64') const plain = bf.toString('utf8') const keys = JSON.parse(plain) - if (_apikeysStoredInJson()) { - if (body.importMode === 'replaceAll') { - await replaceAllAPIKeys_json(keys) - } else { - await importKeys_json(keys, body.importMode) + + // Validate schema of imported keys + if (!Array.isArray(keys)) { + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, `Invalid format: Expected an array of API keys`) + } + + const requiredFields = ['keyName', 'apiKey', 'apiSecret', 'createdAt', 'id'] + for (let i = 0; i < keys.length; i++) { + const key = keys[i] + if (typeof key !== 'object' || key === null) { + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, `Invalid format: Key at index ${i} is not an object`) } - return await addChatflowsCount(keys) - } else if (_apikeysStoredInDb()) { - const appServer = getRunningExpressApp() - const allApiKeys = await appServer.AppDataSource.getRepository(ApiKey).find() - if (body.importMode === 'replaceAll') { - await appServer.AppDataSource.getRepository(ApiKey).delete({ - id: Not(IsNull()) - }) - } - if (body.importMode === 'errorIfExist') { - // if importMode is errorIfExist, check for existing keys and raise error before any modification to the DB - for (const key of keys) { - const keyNameExists = allApiKeys.find((k) => k.keyName === key.keyName) - if (keyNameExists) { - throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `Key with name ${key.keyName} already exists`) - } + + for (const field of requiredFields) { + if (!(field in key)) { + throw new InternalFlowiseError( + StatusCodes.BAD_REQUEST, + `Invalid format: Key at index ${i} is missing required field '${field}'` + 
) + } + if (typeof key[field] !== 'string') { + throw new InternalFlowiseError( + StatusCodes.BAD_REQUEST, + `Invalid format: Key at index ${i} field '${field}' must be a string` + ) + } + if (key[field].trim() === '') { + throw new InternalFlowiseError( + StatusCodes.BAD_REQUEST, + `Invalid format: Key at index ${i} field '${field}' cannot be empty` + ) } } - // iterate through the keys and add them to the database + } + + const appServer = getRunningExpressApp() + const allApiKeys = await appServer.AppDataSource.getRepository(ApiKey).findBy(getWorkspaceSearchOptions(workspaceId)) + if (body.importMode === 'replaceAll') { + await appServer.AppDataSource.getRepository(ApiKey).delete({ + id: Not(IsNull()), + workspaceId: workspaceId + }) + } + if (body.importMode === 'errorIfExist') { + // if importMode is errorIfExist, check for existing keys and raise error before any modification to the DB for (const key of keys) { const keyNameExists = allApiKeys.find((k) => k.keyName === key.keyName) if (keyNameExists) { - const keyIndex = allApiKeys.findIndex((k) => k.keyName === key.keyName) - switch (body.importMode) { - case 'overwriteIfExist': { - const currentKey = allApiKeys[keyIndex] - currentKey.id = key.id - currentKey.apiKey = key.apiKey - currentKey.apiSecret = key.apiSecret - await appServer.AppDataSource.getRepository(ApiKey).save(currentKey) - break - } - case 'ignoreIfExist': { - // ignore this key and continue - continue - } - case 'errorIfExist': { - // should not reach here as we have already checked for existing keys - throw new Error(`Key with name ${key.keyName} already exists`) - } - default: { - throw new Error(`Unknown overwrite option ${body.importMode}`) - } - } - } else { - const newKey = new ApiKey() - newKey.id = key.id - newKey.apiKey = key.apiKey - newKey.apiSecret = key.apiSecret - newKey.keyName = key.keyName - const newKeyEntity = appServer.AppDataSource.getRepository(ApiKey).create(newKey) - await 
appServer.AppDataSource.getRepository(ApiKey).save(newKeyEntity) + throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `Key with name ${key.keyName} already exists`) } } - return getAllApiKeys() - } else { - throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `UNKNOWN APIKEY_STORAGE_TYPE`) } + // iterate through the keys and add them to the database + for (const key of keys) { + const keyNameExists = allApiKeys.find((k) => k.keyName === key.keyName) + if (keyNameExists) { + const keyIndex = allApiKeys.findIndex((k) => k.keyName === key.keyName) + switch (body.importMode) { + case 'overwriteIfExist': + case 'replaceAll': { + const currentKey = allApiKeys[keyIndex] + currentKey.id = uuidv4() + currentKey.apiKey = key.apiKey + currentKey.apiSecret = key.apiSecret + currentKey.workspaceId = workspaceId + await appServer.AppDataSource.getRepository(ApiKey).save(currentKey) + break + } + case 'ignoreIfExist': { + // ignore this key and continue + continue + } + case 'errorIfExist': { + // should not reach here as we have already checked for existing keys + throw new Error(`Key with name ${key.keyName} already exists`) + } + default: { + throw new Error(`Unknown overwrite option ${body.importMode}`) + } + } + } else { + const newKey = new ApiKey() + newKey.id = uuidv4() + newKey.apiKey = key.apiKey + newKey.apiSecret = key.apiSecret + newKey.keyName = key.keyName + newKey.workspaceId = workspaceId + const newKeyEntity = appServer.AppDataSource.getRepository(ApiKey).create(newKey) + await appServer.AppDataSource.getRepository(ApiKey).save(newKeyEntity) + } + } + return await getAllApiKeysFromDB(workspaceId) } catch (error) { throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `Error: apikeyService.importKeys - ${getErrorMessage(error)}`) } @@ -220,24 +232,14 @@ const importKeys = async (body: any) => { const verifyApiKey = async (paramApiKey: string): Promise => { try { - if (_apikeysStoredInJson()) { - const apiKey = await 
getApiKey_json(paramApiKey) - if (!apiKey) { - throw new InternalFlowiseError(StatusCodes.UNAUTHORIZED, `Unauthorized`) - } - return 'OK' - } else if (_apikeysStoredInDb()) { - const appServer = getRunningExpressApp() - const apiKey = await appServer.AppDataSource.getRepository(ApiKey).findOneBy({ - apiKey: paramApiKey - }) - if (!apiKey) { - throw new InternalFlowiseError(StatusCodes.UNAUTHORIZED, `Unauthorized`) - } - return 'OK' - } else { - throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `UNKNOWN APIKEY_STORAGE_TYPE`) + const appServer = getRunningExpressApp() + const apiKey = await appServer.AppDataSource.getRepository(ApiKey).findOneBy({ + apiKey: paramApiKey + }) + if (!apiKey) { + throw new InternalFlowiseError(StatusCodes.UNAUTHORIZED, `Unauthorized`) } + return 'OK' } catch (error) { if (error instanceof InternalFlowiseError && error.statusCode === StatusCodes.UNAUTHORIZED) { throw error @@ -257,5 +259,6 @@ export default { updateApiKey, verifyApiKey, getApiKey, + getApiKeyById, importKeys } diff --git a/packages/server/src/services/assistants/index.ts b/packages/server/src/services/assistants/index.ts index 1ac9fff56..0151124b9 100644 --- a/packages/server/src/services/assistants/index.ts +++ b/packages/server/src/services/assistants/index.ts @@ -1,24 +1,26 @@ -import OpenAI from 'openai' +import { ICommonObject } from 'flowise-components' import { StatusCodes } from 'http-status-codes' -import { uniqWith, isEqual, cloneDeep } from 'lodash' -import { getRunningExpressApp } from '../../utils/getRunningExpressApp' +import { cloneDeep, isEqual, uniqWith } from 'lodash' +import OpenAI from 'openai' +import { DeleteResult, In, QueryRunner } from 'typeorm' import { Assistant } from '../../database/entities/Assistant' import { Credential } from '../../database/entities/Credential' -import { databaseEntities, decryptCredentialData, getAppVersion } from '../../utils' +import { DocumentStore } from '../../database/entities/DocumentStore' +import 
{ Workspace } from '../../enterprise/database/entities/workspace.entity' +import { getWorkspaceSearchOptions } from '../../enterprise/utils/ControllerServiceUtils' import { InternalFlowiseError } from '../../errors/internalFlowiseError' import { getErrorMessage } from '../../errors/utils' -import { DeleteResult, QueryRunner } from 'typeorm' -import { FLOWISE_METRIC_COUNTERS, FLOWISE_COUNTER_STATUS } from '../../Interface.Metrics' import { AssistantType } from '../../Interface' -import nodesService from '../nodes' -import { DocumentStore } from '../../database/entities/DocumentStore' -import { ICommonObject } from 'flowise-components' +import { FLOWISE_COUNTER_STATUS, FLOWISE_METRIC_COUNTERS } from '../../Interface.Metrics' +import { databaseEntities, decryptCredentialData, getAppVersion } from '../../utils' +import { INPUT_PARAMS_TYPE } from '../../utils/constants' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' import logger from '../../utils/logger' import { ASSISTANT_PROMPT_GENERATOR } from '../../utils/prompt' -import { INPUT_PARAMS_TYPE } from '../../utils/constants' -import { validate } from 'uuid' +import { checkUsageLimit } from '../../utils/quotaUsage' +import nodesService from '../nodes' -const createAssistant = async (requestBody: any): Promise => { +const createAssistant = async (requestBody: any, orgId: string): Promise => { try { const appServer = getRunningExpressApp() if (!requestBody.details) { @@ -33,10 +35,14 @@ const createAssistant = async (requestBody: any): Promise => { const assistant = appServer.AppDataSource.getRepository(Assistant).create(newAssistant) const dbResponse = await appServer.AppDataSource.getRepository(Assistant).save(assistant) - await appServer.telemetry.sendTelemetry('assistant_created', { - version: await getAppVersion(), - assistantId: dbResponse.id - }) + await appServer.telemetry.sendTelemetry( + 'assistant_created', + { + version: await getAppVersion(), + assistantId: dbResponse.id + }, + orgId 
+ ) appServer.metricsProvider?.incrementCounter(FLOWISE_METRIC_COUNTERS.ASSISTANT_CREATED, { status: FLOWISE_COUNTER_STATUS.SUCCESS }) @@ -134,11 +140,17 @@ const createAssistant = async (requestBody: any): Promise => { const assistant = appServer.AppDataSource.getRepository(Assistant).create(newAssistant) const dbResponse = await appServer.AppDataSource.getRepository(Assistant).save(assistant) - await appServer.telemetry.sendTelemetry('assistant_created', { - version: await getAppVersion(), - assistantId: dbResponse.id - }) + await appServer.telemetry.sendTelemetry( + 'assistant_created', + { + version: await getAppVersion(), + assistantId: dbResponse.id + }, + orgId + ) + appServer.metricsProvider?.incrementCounter(FLOWISE_METRIC_COUNTERS.ASSISTANT_CREATED, { status: FLOWISE_COUNTER_STATUS.SUCCESS }) + return dbResponse } catch (error) { throw new InternalFlowiseError( @@ -148,11 +160,12 @@ const createAssistant = async (requestBody: any): Promise => { } } -const deleteAssistant = async (assistantId: string, isDeleteBoth: any): Promise => { +const deleteAssistant = async (assistantId: string, isDeleteBoth: any, workspaceId: string): Promise => { try { const appServer = getRunningExpressApp() const assistant = await appServer.AppDataSource.getRepository(Assistant).findOneBy({ - id: assistantId + id: assistantId, + workspaceId: workspaceId }) if (!assistant) { throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Assistant ${assistantId} not found`) @@ -193,16 +206,37 @@ const deleteAssistant = async (assistantId: string, isDeleteBoth: any): Promise< } } -const getAllAssistants = async (type?: AssistantType): Promise => { +async function getAssistantsCountByOrganization(type: AssistantType, organizationId: string): Promise { + try { + const appServer = getRunningExpressApp() + + const workspaces = await appServer.AppDataSource.getRepository(Workspace).findBy({ organizationId }) + const workspaceIds = workspaces.map((workspace) => workspace.id) + const 
assistantsCount = await appServer.AppDataSource.getRepository(Assistant).countBy({ + type, + workspaceId: In(workspaceIds) + }) + + return assistantsCount + } catch (error) { + throw new InternalFlowiseError( + StatusCodes.INTERNAL_SERVER_ERROR, + `Error: assistantsService.getAssistantsCountByOrganization - ${getErrorMessage(error)}` + ) + } +} + +const getAllAssistants = async (workspaceId: string, type?: AssistantType): Promise => { try { const appServer = getRunningExpressApp() if (type) { const dbResponse = await appServer.AppDataSource.getRepository(Assistant).findBy({ - type + type, + ...getWorkspaceSearchOptions(workspaceId) }) return dbResponse } - const dbResponse = await appServer.AppDataSource.getRepository(Assistant).find() + const dbResponse = await appServer.AppDataSource.getRepository(Assistant).findBy(getWorkspaceSearchOptions(workspaceId)) return dbResponse } catch (error) { throw new InternalFlowiseError( @@ -212,11 +246,32 @@ const getAllAssistants = async (type?: AssistantType): Promise => { } } -const getAssistantById = async (assistantId: string): Promise => { +const getAllAssistantsCount = async (workspaceId: string, type?: AssistantType): Promise => { + try { + const appServer = getRunningExpressApp() + if (type) { + const dbResponse = await appServer.AppDataSource.getRepository(Assistant).countBy({ + type, + ...getWorkspaceSearchOptions(workspaceId) + }) + return dbResponse + } + const dbResponse = await appServer.AppDataSource.getRepository(Assistant).countBy(getWorkspaceSearchOptions(workspaceId)) + return dbResponse + } catch (error) { + throw new InternalFlowiseError( + StatusCodes.INTERNAL_SERVER_ERROR, + `Error: assistantsService.getAllAssistantsCount - ${getErrorMessage(error)}` + ) + } +} + +const getAssistantById = async (assistantId: string, workspaceId: string): Promise => { try { const appServer = getRunningExpressApp() const dbResponse = await appServer.AppDataSource.getRepository(Assistant).findOneBy({ - id: assistantId + id: 
assistantId, + workspaceId: workspaceId }) if (!dbResponse) { throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Assistant ${assistantId} not found`) @@ -230,11 +285,12 @@ const getAssistantById = async (assistantId: string): Promise => { } } -const updateAssistant = async (assistantId: string, requestBody: any): Promise => { +const updateAssistant = async (assistantId: string, requestBody: any, workspaceId: string): Promise => { try { const appServer = getRunningExpressApp() const assistant = await appServer.AppDataSource.getRepository(Assistant).findOneBy({ - id: assistantId + id: assistantId, + workspaceId: workspaceId }) if (!assistant) { @@ -338,20 +394,22 @@ const updateAssistant = async (assistantId: string, requestBody: any): Promise[], queryRunner?: QueryRunner): Promise => { +const importAssistants = async ( + newAssistants: Partial[], + orgId: string, + _: string, + subscriptionId: string, + queryRunner?: QueryRunner +): Promise => { try { - for (const data of newAssistants) { - if (data.id && !validate(data.id)) { - throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: importAssistants - invalid id!`) - } - } - const appServer = getRunningExpressApp() const repository = queryRunner ? 
queryRunner.manager.getRepository(Assistant) : appServer.AppDataSource.getRepository(Assistant) // step 1 - check whether array is zero if (newAssistants.length == 0) return + await checkUsageLimit('flows', subscriptionId, appServer.usageCacheManager, newAssistants.length) + // step 2 - check whether ids are duplicate in database let ids = '(' let count: number = 0 @@ -406,10 +464,10 @@ const getChatModels = async (): Promise => { } } -const getDocumentStores = async (): Promise => { +const getDocumentStores = async (activeWorkspaceId: string): Promise => { try { const appServer = getRunningExpressApp() - const stores = await appServer.AppDataSource.getRepository(DocumentStore).find() + const stores = await appServer.AppDataSource.getRepository(DocumentStore).findBy(getWorkspaceSearchOptions(activeWorkspaceId)) const returnData = [] for (const store of stores) { if (store.status === 'UPSERTED') { @@ -492,11 +550,13 @@ export default { createAssistant, deleteAssistant, getAllAssistants, + getAllAssistantsCount, getAssistantById, updateAssistant, importAssistants, getChatModels, getDocumentStores, getTools, - generateAssistantInstruction + generateAssistantInstruction, + getAssistantsCountByOrganization } diff --git a/packages/server/src/services/chat-messages/index.ts b/packages/server/src/services/chat-messages/index.ts index 1ee804e0b..93adf3e5b 100644 --- a/packages/server/src/services/chat-messages/index.ts +++ b/packages/server/src/services/chat-messages/index.ts @@ -1,15 +1,16 @@ import { removeFilesFromStorage } from 'flowise-components' import { StatusCodes } from 'http-status-codes' -import { DeleteResult, FindOptionsWhere } from 'typeorm' +import { DeleteResult, FindOptionsWhere, In } from 'typeorm' import { ChatMessage } from '../../database/entities/ChatMessage' import { ChatMessageFeedback } from '../../database/entities/ChatMessageFeedback' import { InternalFlowiseError } from '../../errors/internalFlowiseError' import { getErrorMessage } from 
'../../errors/utils' import { ChatMessageRatingType, ChatType, IChatMessage, MODE } from '../../Interface' +import { UsageCacheManager } from '../../UsageCacheManager' import { utilAddChatMessage } from '../../utils/addChatMesage' import { utilGetChatMessage } from '../../utils/getChatMessage' import { getRunningExpressApp } from '../../utils/getRunningExpressApp' -import logger from '../../utils/logger' +import { updateStorageUsage } from '../../utils/quotaUsage' // Add chatmessages for chatflowid const createChatMessage = async (chatMessage: Partial) => { @@ -36,7 +37,10 @@ const getAllChatMessages = async ( endDate?: string, messageId?: string, feedback?: boolean, - feedbackTypes?: ChatMessageRatingType[] + feedbackTypes?: ChatMessageRatingType[], + activeWorkspaceId?: string, + page?: number, + pageSize?: number ): Promise => { try { const dbResponse = await utilGetChatMessage({ @@ -50,7 +54,10 @@ const getAllChatMessages = async ( endDate, messageId, feedback, - feedbackTypes + feedbackTypes, + activeWorkspaceId, + page, + pageSize }) return dbResponse } catch (error) { @@ -73,7 +80,8 @@ const getAllInternalChatMessages = async ( endDate?: string, messageId?: string, feedback?: boolean, - feedbackTypes?: ChatMessageRatingType[] + feedbackTypes?: ChatMessageRatingType[], + activeWorkspaceId?: string ): Promise => { try { const dbResponse = await utilGetChatMessage({ @@ -87,7 +95,8 @@ const getAllInternalChatMessages = async ( endDate, messageId, feedback, - feedbackTypes + feedbackTypes, + activeWorkspaceId }) return dbResponse } catch (error) { @@ -101,7 +110,10 @@ const getAllInternalChatMessages = async ( const removeAllChatMessages = async ( chatId: string, chatflowid: string, - deleteOptions: FindOptionsWhere + deleteOptions: FindOptionsWhere, + orgId: string, + workspaceId: string, + usageCacheManager: UsageCacheManager ): Promise => { try { const appServer = getRunningExpressApp() @@ -113,12 +125,12 @@ const removeAllChatMessages = async ( // Delete all 
uploads corresponding to this chatflow/chatId if (chatId) { try { - await removeFilesFromStorage(chatflowid, chatId) + const { totalSize } = await removeFilesFromStorage(orgId, chatflowid, chatId) + await updateStorageUsage(orgId, workspaceId, totalSize, usageCacheManager) } catch (e) { - logger.error(`[server]: Error deleting file storage for chatflow ${chatflowid}, chatId ${chatId}: ${e}`) + // Don't throw error if file deletion fails because file might not exist } } - const dbResponse = await appServer.AppDataSource.getRepository(ChatMessage).delete(deleteOptions) return dbResponse } catch (error) { @@ -132,7 +144,10 @@ const removeAllChatMessages = async ( const removeChatMessagesByMessageIds = async ( chatflowid: string, chatIdMap: Map, - messageIds: string[] + messageIds: string[], + orgId: string, + workspaceId: string, + usageCacheManager: UsageCacheManager ): Promise => { try { const appServer = getRunningExpressApp() @@ -149,7 +164,12 @@ const removeChatMessagesByMessageIds = async ( await appServer.AppDataSource.getRepository(ChatMessageFeedback).delete(feedbackDeleteOptions) // Delete all uploads corresponding to this chatflow/chatId - await removeFilesFromStorage(chatflowid, chatId) + try { + const { totalSize } = await removeFilesFromStorage(orgId, chatflowid, chatId) + await updateStorageUsage(orgId, workspaceId, totalSize, usageCacheManager) + } catch (e) { + // Don't throw error if file deletion fails because file might not exist + } } // Delete executions if they exist @@ -162,7 +182,7 @@ const removeChatMessagesByMessageIds = async ( } catch (error) { throw new InternalFlowiseError( StatusCodes.INTERNAL_SERVER_ERROR, - `Error: chatMessagesService.removeAllChatMessages - ${getErrorMessage(error)}` + `Error: chatMessagesService.removeChatMessagesByMessageIds - ${getErrorMessage(error)}` ) } } @@ -188,14 +208,14 @@ const abortChatMessage = async (chatId: string, chatflowid: string) => { } } -async function getAllMessages(): Promise { +async function 
getMessagesByChatflowIds(chatflowIds: string[]): Promise { const appServer = getRunningExpressApp() - return await appServer.AppDataSource.getRepository(ChatMessage).find() + return await appServer.AppDataSource.getRepository(ChatMessage).find({ where: { chatflowid: In(chatflowIds) } }) } -async function getAllMessagesFeedback(): Promise { +async function getMessagesFeedbackByChatflowIds(chatflowIds: string[]): Promise { const appServer = getRunningExpressApp() - return await appServer.AppDataSource.getRepository(ChatMessageFeedback).find() + return await appServer.AppDataSource.getRepository(ChatMessageFeedback).find({ where: { chatflowid: In(chatflowIds) } }) } export default { @@ -205,6 +225,6 @@ export default { removeAllChatMessages, removeChatMessagesByMessageIds, abortChatMessage, - getAllMessages, - getAllMessagesFeedback + getMessagesByChatflowIds, + getMessagesFeedbackByChatflowIds } diff --git a/packages/server/src/services/chatflows/index.ts b/packages/server/src/services/chatflows/index.ts index 1367f42fc..913eb533c 100644 --- a/packages/server/src/services/chatflows/index.ts +++ b/packages/server/src/services/chatflows/index.ts @@ -1,12 +1,15 @@ import { ICommonObject, removeFolderFromStorage } from 'flowise-components' import { StatusCodes } from 'http-status-codes' -import { QueryRunner } from 'typeorm' +import { In } from 'typeorm' import { ChatflowType, IReactFlowObject } from '../../Interface' import { FLOWISE_COUNTER_STATUS, FLOWISE_METRIC_COUNTERS } from '../../Interface.Metrics' -import { ChatFlow } from '../../database/entities/ChatFlow' +import { UsageCacheManager } from '../../UsageCacheManager' +import { ChatFlow, EnumChatflowType } from '../../database/entities/ChatFlow' import { ChatMessage } from '../../database/entities/ChatMessage' import { ChatMessageFeedback } from '../../database/entities/ChatMessageFeedback' import { UpsertHistory } from '../../database/entities/UpsertHistory' +import { Workspace } from 
'../../enterprise/database/entities/workspace.entity' +import { getWorkspaceSearchOptions } from '../../enterprise/utils/ControllerServiceUtils' import { InternalFlowiseError } from '../../errors/internalFlowiseError' import { getErrorMessage } from '../../errors/utils' import documentStoreService from '../../services/documentstore' @@ -15,7 +18,16 @@ import { containsBase64File, updateFlowDataWithFilePaths } from '../../utils/fil import { getRunningExpressApp } from '../../utils/getRunningExpressApp' import { utilGetUploadsConfig } from '../../utils/getUploadsConfig' import logger from '../../utils/logger' -import { validate } from 'uuid' +import { updateStorageUsage } from '../../utils/quotaUsage' + +export const enum ChatflowErrorMessage { + INVALID_CHATFLOW_TYPE = 'Invalid Chatflow Type' +} + +export function validateChatflowType(type: ChatflowType | undefined) { + if (!Object.values(EnumChatflowType).includes(type as EnumChatflowType)) + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, ChatflowErrorMessage.INVALID_CHATFLOW_TYPE) +} // Check if chatflow valid for streaming const checkIfChatflowIsValidForStreaming = async (chatflowId: string): Promise => { @@ -90,25 +102,32 @@ const checkIfChatflowIsValidForUploads = async (chatflowId: string): Promise => { +const deleteChatflow = async (chatflowId: string, orgId: string, workspaceId: string): Promise => { try { const appServer = getRunningExpressApp() + + await getChatflowById(chatflowId, workspaceId) + const dbResponse = await appServer.AppDataSource.getRepository(ChatFlow).delete({ id: chatflowId }) + + // Update document store usage + await documentStoreService.updateDocumentStoreUsage(chatflowId, undefined, workspaceId) + + // Delete all chat messages + await appServer.AppDataSource.getRepository(ChatMessage).delete({ chatflowid: chatflowId }) + + // Delete all chat feedback + await appServer.AppDataSource.getRepository(ChatMessageFeedback).delete({ chatflowid: chatflowId }) + + // Delete all upsert 
history + await appServer.AppDataSource.getRepository(UpsertHistory).delete({ chatflowid: chatflowId }) + try { // Delete all uploads corresponding to this chatflow - await removeFolderFromStorage(chatflowId) - await documentStoreService.updateDocumentStoreUsage(chatflowId, undefined) - - // Delete all chat messages - await appServer.AppDataSource.getRepository(ChatMessage).delete({ chatflowid: chatflowId }) - - // Delete all chat feedback - await appServer.AppDataSource.getRepository(ChatMessageFeedback).delete({ chatflowid: chatflowId }) - - // Delete all upsert history - await appServer.AppDataSource.getRepository(UpsertHistory).delete({ chatflowid: chatflowId }) + const { totalSize } = await removeFolderFromStorage(orgId, chatflowId) + await updateStorageUsage(orgId, workspaceId, totalSize, appServer.usageCacheManager) } catch (e) { - logger.error(`[server]: Error deleting file storage for chatflow ${chatflowId}: ${e}`) + logger.error(`[server]: Error deleting file storage for chatflow ${chatflowId}`) } return dbResponse } catch (error) { @@ -119,21 +138,36 @@ const deleteChatflow = async (chatflowId: string): Promise => { } } -const getAllChatflows = async (type?: ChatflowType): Promise => { +const getAllChatflows = async (type?: ChatflowType, workspaceId?: string, page: number = -1, limit: number = -1) => { try { const appServer = getRunningExpressApp() - const dbResponse = await appServer.AppDataSource.getRepository(ChatFlow).find() + + const queryBuilder = appServer.AppDataSource.getRepository(ChatFlow) + .createQueryBuilder('chat_flow') + .orderBy('chat_flow.updatedDate', 'DESC') + + if (page > 0 && limit > 0) { + queryBuilder.skip((page - 1) * limit) + queryBuilder.take(limit) + } if (type === 'MULTIAGENT') { - return dbResponse.filter((chatflow) => chatflow.type === 'MULTIAGENT') + queryBuilder.andWhere('chat_flow.type = :type', { type: 'MULTIAGENT' }) } else if (type === 'AGENTFLOW') { - return dbResponse.filter((chatflow) => chatflow.type === 
'AGENTFLOW') + queryBuilder.andWhere('chat_flow.type = :type', { type: 'AGENTFLOW' }) } else if (type === 'ASSISTANT') { - return dbResponse.filter((chatflow) => chatflow.type === 'ASSISTANT') + queryBuilder.andWhere('chat_flow.type = :type', { type: 'ASSISTANT' }) } else if (type === 'CHATFLOW') { // fetch all chatflows that are not agentflow - return dbResponse.filter((chatflow) => chatflow.type === 'CHATFLOW' || !chatflow.type) + queryBuilder.andWhere('chat_flow.type = :type', { type: 'CHATFLOW' }) + } + if (workspaceId) queryBuilder.andWhere('chat_flow.workspaceId = :workspaceId', { workspaceId }) + const [data, total] = await queryBuilder.getManyAndCount() + + if (page > 0 && limit > 0) { + return { data, total } + } else { + return data } - return dbResponse } catch (error) { throw new InternalFlowiseError( StatusCodes.INTERNAL_SERVER_ERROR, @@ -142,6 +176,46 @@ const getAllChatflows = async (type?: ChatflowType): Promise => { } } +async function getAllChatflowsCountByOrganization(type: ChatflowType, organizationId: string): Promise { + try { + const appServer = getRunningExpressApp() + + const workspaces = await appServer.AppDataSource.getRepository(Workspace).findBy({ organizationId }) + const workspaceIds = workspaces.map((workspace) => workspace.id) + const chatflowsCount = await appServer.AppDataSource.getRepository(ChatFlow).countBy({ + type, + workspaceId: In(workspaceIds) + }) + + return chatflowsCount + } catch (error) { + throw new InternalFlowiseError( + StatusCodes.INTERNAL_SERVER_ERROR, + `Error: chatflowsService.getAllChatflowsCountByOrganization - ${getErrorMessage(error)}` + ) + } +} + +const getAllChatflowsCount = async (type?: ChatflowType, workspaceId?: string): Promise => { + try { + const appServer = getRunningExpressApp() + if (type) { + const dbResponse = await appServer.AppDataSource.getRepository(ChatFlow).countBy({ + type, + ...getWorkspaceSearchOptions(workspaceId) + }) + return dbResponse + } + const dbResponse = await 
appServer.AppDataSource.getRepository(ChatFlow).countBy(getWorkspaceSearchOptions(workspaceId)) + return dbResponse + } catch (error) { + throw new InternalFlowiseError( + StatusCodes.INTERNAL_SERVER_ERROR, + `Error: chatflowsService.getAllChatflowsCount - ${getErrorMessage(error)}` + ) + } +} + const getChatflowByApiKey = async (apiKeyId: string, keyonly?: unknown): Promise => { try { // Here we only get chatflows that are bounded by the apikeyid and chatflows that are not bounded by any apikey @@ -166,11 +240,14 @@ const getChatflowByApiKey = async (apiKeyId: string, keyonly?: unknown): Promise } } -const getChatflowById = async (chatflowId: string): Promise => { +const getChatflowById = async (chatflowId: string, workspaceId?: string): Promise => { try { const appServer = getRunningExpressApp() - const dbResponse = await appServer.AppDataSource.getRepository(ChatFlow).findOneBy({ - id: chatflowId + const dbResponse = await appServer.AppDataSource.getRepository(ChatFlow).findOne({ + where: { + id: chatflowId, + ...(workspaceId ? 
{ workspaceId } : {}) + } }) if (!dbResponse) { throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Chatflow ${chatflowId} not found in the database!`) @@ -184,145 +261,93 @@ const getChatflowById = async (chatflowId: string): Promise => { } } -const saveChatflow = async (newChatFlow: ChatFlow): Promise => { - try { - const appServer = getRunningExpressApp() - let dbResponse: ChatFlow - if (containsBase64File(newChatFlow)) { - // we need a 2-step process, as we need to save the chatflow first and then update the file paths - // this is because we need the chatflow id to create the file paths +const saveChatflow = async ( + newChatFlow: ChatFlow, + orgId: string, + workspaceId: string, + subscriptionId: string, + usageCacheManager: UsageCacheManager +): Promise => { + validateChatflowType(newChatFlow.type) + const appServer = getRunningExpressApp() - // step 1 - save with empty flowData - const incomingFlowData = newChatFlow.flowData - newChatFlow.flowData = JSON.stringify({}) - const chatflow = appServer.AppDataSource.getRepository(ChatFlow).create(newChatFlow) - const step1Results = await appServer.AppDataSource.getRepository(ChatFlow).save(chatflow) + let dbResponse: ChatFlow + if (containsBase64File(newChatFlow)) { + // we need a 2-step process, as we need to save the chatflow first and then update the file paths + // this is because we need the chatflow id to create the file paths - // step 2 - convert base64 to file paths and update the chatflow - step1Results.flowData = await updateFlowDataWithFilePaths(step1Results.id, incomingFlowData) - await _checkAndUpdateDocumentStoreUsage(step1Results) - dbResponse = await appServer.AppDataSource.getRepository(ChatFlow).save(step1Results) - } else { - const chatflow = appServer.AppDataSource.getRepository(ChatFlow).create(newChatFlow) - dbResponse = await appServer.AppDataSource.getRepository(ChatFlow).save(chatflow) - } - await appServer.telemetry.sendTelemetry('chatflow_created', { + // step 1 - save with empty 
flowData + const incomingFlowData = newChatFlow.flowData + newChatFlow.flowData = JSON.stringify({}) + const chatflow = appServer.AppDataSource.getRepository(ChatFlow).create(newChatFlow) + const step1Results = await appServer.AppDataSource.getRepository(ChatFlow).save(chatflow) + + // step 2 - convert base64 to file paths and update the chatflow + step1Results.flowData = await updateFlowDataWithFilePaths( + step1Results.id, + incomingFlowData, + orgId, + workspaceId, + subscriptionId, + usageCacheManager + ) + await _checkAndUpdateDocumentStoreUsage(step1Results, newChatFlow.workspaceId) + dbResponse = await appServer.AppDataSource.getRepository(ChatFlow).save(step1Results) + } else { + const chatflow = appServer.AppDataSource.getRepository(ChatFlow).create(newChatFlow) + dbResponse = await appServer.AppDataSource.getRepository(ChatFlow).save(chatflow) + } + + const productId = await appServer.identityManager.getProductIdFromSubscription(subscriptionId) + + await appServer.telemetry.sendTelemetry( + 'chatflow_created', + { version: await getAppVersion(), chatflowId: dbResponse.id, - flowGraph: getTelemetryFlowObj(JSON.parse(dbResponse.flowData)?.nodes, JSON.parse(dbResponse.flowData)?.edges) - }) - appServer.metricsProvider?.incrementCounter( - dbResponse?.type === 'MULTIAGENT' ? FLOWISE_METRIC_COUNTERS.AGENTFLOW_CREATED : FLOWISE_METRIC_COUNTERS.CHATFLOW_CREATED, - { status: FLOWISE_COUNTER_STATUS.SUCCESS } - ) + flowGraph: getTelemetryFlowObj(JSON.parse(dbResponse.flowData)?.nodes, JSON.parse(dbResponse.flowData)?.edges), + productId, + subscriptionId + }, + orgId + ) - return dbResponse - } catch (error) { - throw new InternalFlowiseError( - StatusCodes.INTERNAL_SERVER_ERROR, - `Error: chatflowsService.saveChatflow - ${getErrorMessage(error)}` - ) - } + appServer.metricsProvider?.incrementCounter( + dbResponse?.type === 'MULTIAGENT' ? 
FLOWISE_METRIC_COUNTERS.AGENTFLOW_CREATED : FLOWISE_METRIC_COUNTERS.CHATFLOW_CREATED, + { status: FLOWISE_COUNTER_STATUS.SUCCESS } + ) + + return dbResponse } -const importChatflows = async (newChatflows: Partial[], queryRunner?: QueryRunner): Promise => { - try { - for (const data of newChatflows) { - if (data.id && !validate(data.id)) { - throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: importChatflows - invalid id!`) - } - } - - const appServer = getRunningExpressApp() - const repository = queryRunner ? queryRunner.manager.getRepository(ChatFlow) : appServer.AppDataSource.getRepository(ChatFlow) - - // step 1 - check whether file chatflows array is zero - if (newChatflows.length == 0) return - - // step 2 - check whether ids are duplicate in database - let ids = '(' - let count: number = 0 - const lastCount = newChatflows.length - 1 - newChatflows.forEach((newChatflow) => { - ids += `'${newChatflow.id}'` - if (lastCount != count) ids += ',' - if (lastCount == count) ids += ')' - count += 1 - }) - - const selectResponse = await repository.createQueryBuilder('cf').select('cf.id').where(`cf.id IN ${ids}`).getMany() - const foundIds = selectResponse.map((response) => { - return response.id - }) - - // step 3 - remove ids that are only duplicate - const prepChatflows: Partial[] = newChatflows.map((newChatflow) => { - let id: string = '' - if (newChatflow.id) id = newChatflow.id - let flowData: string = '' - if (newChatflow.flowData) flowData = newChatflow.flowData - if (foundIds.includes(id)) { - newChatflow.id = undefined - newChatflow.name += ' (1)' - } - newChatflow.flowData = JSON.stringify(JSON.parse(flowData)) - return newChatflow - }) - - // step 4 - transactional insert array of entities - const insertResponse = await repository.insert(prepChatflows) - - return insertResponse - } catch (error) { - throw new InternalFlowiseError( - StatusCodes.INTERNAL_SERVER_ERROR, - `Error: chatflowsService.saveChatflows - ${getErrorMessage(error)}` 
+const updateChatflow = async ( + chatflow: ChatFlow, + updateChatFlow: ChatFlow, + orgId: string, + workspaceId: string, + subscriptionId: string +): Promise => { + const appServer = getRunningExpressApp() + if (updateChatFlow.flowData && containsBase64File(updateChatFlow)) { + updateChatFlow.flowData = await updateFlowDataWithFilePaths( + chatflow.id, + updateChatFlow.flowData, + orgId, + workspaceId, + subscriptionId, + appServer.usageCacheManager ) } -} - -const updateChatflow = async (chatflow: ChatFlow, updateChatFlow: ChatFlow): Promise => { - try { - const appServer = getRunningExpressApp() - if (updateChatFlow.flowData && containsBase64File(updateChatFlow)) { - updateChatFlow.flowData = await updateFlowDataWithFilePaths(chatflow.id, updateChatFlow.flowData) - } - const newDbChatflow = appServer.AppDataSource.getRepository(ChatFlow).merge(chatflow, updateChatFlow) - await _checkAndUpdateDocumentStoreUsage(newDbChatflow) - const dbResponse = await appServer.AppDataSource.getRepository(ChatFlow).save(newDbChatflow) - - return dbResponse - } catch (error) { - throw new InternalFlowiseError( - StatusCodes.INTERNAL_SERVER_ERROR, - `Error: chatflowsService.updateChatflow - ${getErrorMessage(error)}` - ) + if (updateChatFlow.type || updateChatFlow.type === '') { + validateChatflowType(updateChatFlow.type) + } else { + updateChatFlow.type = chatflow.type } -} + const newDbChatflow = appServer.AppDataSource.getRepository(ChatFlow).merge(chatflow, updateChatFlow) + await _checkAndUpdateDocumentStoreUsage(newDbChatflow, chatflow.workspaceId) + const dbResponse = await appServer.AppDataSource.getRepository(ChatFlow).save(newDbChatflow) -// Get specific chatflow via id (PUBLIC endpoint, used when sharing chatbot link) -const getSinglePublicChatflow = async (chatflowId: string): Promise => { - try { - const appServer = getRunningExpressApp() - const dbResponse = await appServer.AppDataSource.getRepository(ChatFlow).findOneBy({ - id: chatflowId - }) - if (dbResponse && 
dbResponse.isPublic) { - return dbResponse - } else if (dbResponse && !dbResponse.isPublic) { - throw new InternalFlowiseError(StatusCodes.UNAUTHORIZED, `Unauthorized`) - } - throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Chatflow ${chatflowId} not found`) - } catch (error) { - if (error instanceof InternalFlowiseError && error.statusCode === StatusCodes.UNAUTHORIZED) { - throw error - } else { - throw new InternalFlowiseError( - StatusCodes.INTERNAL_SERVER_ERROR, - `Error: chatflowsService.getSinglePublicChatflow - ${getErrorMessage(error)}` - ) - } - } + return dbResponse } // Get specific chatflow chatbotConfig via id (PUBLIC endpoint, used to retrieve config for embedded chat) @@ -342,7 +367,20 @@ const getSinglePublicChatbotConfig = async (chatflowId: string): Promise => if (dbResponse.chatbotConfig || uploadsConfig) { try { const parsedConfig = dbResponse.chatbotConfig ? JSON.parse(dbResponse.chatbotConfig) : {} - return { ...parsedConfig, uploads: uploadsConfig, flowData: dbResponse.flowData } + const ttsConfig = + typeof dbResponse.textToSpeech === 'string' ? 
JSON.parse(dbResponse.textToSpeech) : dbResponse.textToSpeech + + let isTTSEnabled = false + if (ttsConfig) { + Object.keys(ttsConfig).forEach((provider) => { + if (provider !== 'none' && ttsConfig?.[provider]?.status) { + isTTSEnabled = true + } + }) + } + delete parsedConfig.allowedOrigins + delete parsedConfig.allowedOriginsError + return { ...parsedConfig, uploads: uploadsConfig, flowData: dbResponse.flowData, isTTSEnabled } } catch (e) { throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `Error parsing Chatbot Config for Chatflow ${chatflowId}`) } @@ -356,15 +394,36 @@ const getSinglePublicChatbotConfig = async (chatflowId: string): Promise => } } -const _checkAndUpdateDocumentStoreUsage = async (chatflow: ChatFlow) => { +const _checkAndUpdateDocumentStoreUsage = async (chatflow: ChatFlow, workspaceId?: string) => { const parsedFlowData: IReactFlowObject = JSON.parse(chatflow.flowData) const nodes = parsedFlowData.nodes // from the nodes array find if there is a node with name == documentStore) const node = nodes.length > 0 && nodes.find((node) => node.data.name === 'documentStore') if (!node || !node.data || !node.data.inputs || node.data.inputs['selectedStore'] === undefined) { - await documentStoreService.updateDocumentStoreUsage(chatflow.id, undefined) + await documentStoreService.updateDocumentStoreUsage(chatflow.id, undefined, workspaceId) } else { - await documentStoreService.updateDocumentStoreUsage(chatflow.id, node.data.inputs['selectedStore']) + await documentStoreService.updateDocumentStoreUsage(chatflow.id, node.data.inputs['selectedStore'], workspaceId) + } +} + +const checkIfChatflowHasChanged = async (chatflowId: string, lastUpdatedDateTime: string): Promise => { + try { + const appServer = getRunningExpressApp() + //** + const chatflow = await appServer.AppDataSource.getRepository(ChatFlow).findOneBy({ + id: chatflowId + }) + if (!chatflow) { + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Chatflow ${chatflowId} not 
found`) + } + // parse the lastUpdatedDateTime as a date and + //check if the updatedDate is the same as the lastUpdatedDateTime + return { hasChanged: chatflow.updatedDate.toISOString() !== lastUpdatedDateTime } + } catch (error) { + throw new InternalFlowiseError( + StatusCodes.INTERNAL_SERVER_ERROR, + `Error: chatflowsService.checkIfChatflowHasChanged - ${getErrorMessage(error)}` + ) } } @@ -373,11 +432,12 @@ export default { checkIfChatflowIsValidForUploads, deleteChatflow, getAllChatflows, + getAllChatflowsCount, getChatflowByApiKey, getChatflowById, saveChatflow, - importChatflows, updateChatflow, - getSinglePublicChatflow, - getSinglePublicChatbotConfig + getSinglePublicChatbotConfig, + checkIfChatflowHasChanged, + getAllChatflowsCountByOrganization } diff --git a/packages/server/src/services/credentials/index.ts b/packages/server/src/services/credentials/index.ts index b92218072..9f1f318ad 100644 --- a/packages/server/src/services/credentials/index.ts +++ b/packages/server/src/services/credentials/index.ts @@ -1,16 +1,24 @@ -import { omit } from 'lodash' import { StatusCodes } from 'http-status-codes' -import { getRunningExpressApp } from '../../utils/getRunningExpressApp' -import { Credential } from '../../database/entities/Credential' -import { transformToCredentialEntity, decryptCredentialData } from '../../utils' +import { omit } from 'lodash' import { ICredentialReturnResponse } from '../../Interface' +import { Credential } from '../../database/entities/Credential' +import { WorkspaceShared } from '../../enterprise/database/entities/EnterpriseEntities' +import { WorkspaceService } from '../../enterprise/services/workspace.service' +import { getWorkspaceSearchOptions } from '../../enterprise/utils/ControllerServiceUtils' import { InternalFlowiseError } from '../../errors/internalFlowiseError' import { getErrorMessage } from '../../errors/utils' +import { decryptCredentialData, transformToCredentialEntity } from '../../utils' +import { 
getRunningExpressApp } from '../../utils/getRunningExpressApp' const createCredential = async (requestBody: any) => { try { const appServer = getRunningExpressApp() const newCredential = await transformToCredentialEntity(requestBody) + + if (requestBody.id) { + newCredential.id = requestBody.id + } + const credential = await appServer.AppDataSource.getRepository(Credential).create(newCredential) const dbResponse = await appServer.AppDataSource.getRepository(Credential).save(credential) return dbResponse @@ -23,10 +31,10 @@ const createCredential = async (requestBody: any) => { } // Delete all credentials from chatflowid -const deleteCredentials = async (credentialId: string): Promise => { +const deleteCredentials = async (credentialId: string, workspaceId: string): Promise => { try { const appServer = getRunningExpressApp() - const dbResponse = await appServer.AppDataSource.getRepository(Credential).delete({ id: credentialId }) + const dbResponse = await appServer.AppDataSource.getRepository(Credential).delete({ id: credentialId, workspaceId: workspaceId }) if (!dbResponse) { throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Credential ${credentialId} not found`) } @@ -39,30 +47,73 @@ const deleteCredentials = async (credentialId: string): Promise => { } } -const getAllCredentials = async (paramCredentialName: any) => { +const getAllCredentials = async (paramCredentialName: any, workspaceId: string) => { try { const appServer = getRunningExpressApp() - let dbResponse = [] + let dbResponse: any[] = [] if (paramCredentialName) { if (Array.isArray(paramCredentialName)) { for (let i = 0; i < paramCredentialName.length; i += 1) { const name = paramCredentialName[i] as string - const credentials = await appServer.AppDataSource.getRepository(Credential).findBy({ - credentialName: name - }) + const searchOptions = { + credentialName: name, + ...getWorkspaceSearchOptions(workspaceId) + } + const credentials = await 
appServer.AppDataSource.getRepository(Credential).findBy(searchOptions) dbResponse.push(...credentials) } } else { - const credentials = await appServer.AppDataSource.getRepository(Credential).findBy({ - credentialName: paramCredentialName as string - }) + const searchOptions = { + credentialName: paramCredentialName, + ...getWorkspaceSearchOptions(workspaceId) + } + const credentials = await appServer.AppDataSource.getRepository(Credential).findBy(searchOptions) dbResponse = [...credentials] } + // get shared credentials + if (workspaceId) { + const workspaceService = new WorkspaceService() + const sharedItems = (await workspaceService.getSharedItemsForWorkspace(workspaceId, 'credential')) as Credential[] + if (sharedItems.length) { + for (const sharedItem of sharedItems) { + // Check if paramCredentialName is array + if (Array.isArray(paramCredentialName)) { + for (let i = 0; i < paramCredentialName.length; i += 1) { + const name = paramCredentialName[i] as string + if (sharedItem.credentialName === name) { + // @ts-ignore + sharedItem.shared = true + dbResponse.push(omit(sharedItem, ['encryptedData'])) + } + } + } else { + if (sharedItem.credentialName === paramCredentialName) { + // @ts-ignore + sharedItem.shared = true + dbResponse.push(omit(sharedItem, ['encryptedData'])) + } + } + } + } + } } else { - const credentials = await appServer.AppDataSource.getRepository(Credential).find() + const credentials = await appServer.AppDataSource.getRepository(Credential).findBy(getWorkspaceSearchOptions(workspaceId)) for (const credential of credentials) { dbResponse.push(omit(credential, ['encryptedData'])) } + + // get shared credentials + if (workspaceId) { + const workspaceService = new WorkspaceService() + const sharedItems = (await workspaceService.getSharedItemsForWorkspace(workspaceId, 'credential')) as Credential[] + if (sharedItems.length) { + for (const sharedItem of sharedItems) { + // @ts-ignore + sharedItem.shared = true + dbResponse.push(omit(sharedItem, 
['encryptedData'])) + } + } + } } return dbResponse } catch (error) { @@ -73,11 +124,12 @@ const getAllCredentials = async (paramCredentialName: any) => { } } -const getCredentialById = async (credentialId: string): Promise => { +const getCredentialById = async (credentialId: string, workspaceId: string): Promise => { try { const appServer = getRunningExpressApp() const credential = await appServer.AppDataSource.getRepository(Credential).findOneBy({ - id: credentialId + id: credentialId, + workspaceId: workspaceId }) if (!credential) { throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Credential ${credentialId} not found`) @@ -92,7 +144,19 @@ const getCredentialById = async (credentialId: string): Promise => { ...credential, plainDataObj: decryptedCredentialData } - const dbResponse = omit(returnCredential, ['encryptedData']) + const dbResponse: any = omit(returnCredential, ['encryptedData']) + if (workspaceId) { + const shared = await appServer.AppDataSource.getRepository(WorkspaceShared).count({ + where: { + workspaceId: workspaceId, + sharedItemId: credentialId, + itemType: 'credential' + } + }) + if (shared > 0) { + dbResponse.shared = true + } + } return dbResponse } catch (error) { throw new InternalFlowiseError( @@ -102,11 +166,12 @@ const getCredentialById = async (credentialId: string): Promise => { } } -const updateCredential = async (credentialId: string, requestBody: any): Promise => { +const updateCredential = async (credentialId: string, requestBody: any, workspaceId: string): Promise => { try { const appServer = getRunningExpressApp() const credential = await appServer.AppDataSource.getRepository(Credential).findOneBy({ - id: credentialId + id: credentialId, + workspaceId: workspaceId }) if (!credential) { throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Credential ${credentialId} not found`) @@ -114,6 +179,7 @@ const updateCredential = async (credentialId: string, requestBody: any): Promise const decryptedCredentialData = await 
decryptCredentialData(credential.encryptedData) requestBody.plainDataObj = { ...decryptedCredentialData, ...requestBody.plainDataObj } const updateCredential = await transformToCredentialEntity(requestBody) + updateCredential.workspaceId = workspaceId await appServer.AppDataSource.getRepository(Credential).merge(credential, updateCredential) const dbResponse = await appServer.AppDataSource.getRepository(Credential).save(credential) return dbResponse diff --git a/packages/server/src/services/dataset/index.ts b/packages/server/src/services/dataset/index.ts new file mode 100644 index 000000000..4042e420f --- /dev/null +++ b/packages/server/src/services/dataset/index.ts @@ -0,0 +1,384 @@ +import { StatusCodes } from 'http-status-codes' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' +import { InternalFlowiseError } from '../../errors/internalFlowiseError' +import { getErrorMessage } from '../../errors/utils' +import { Dataset } from '../../database/entities/Dataset' +import { DatasetRow } from '../../database/entities/DatasetRow' +import { Readable } from 'stream' +import { In } from 'typeorm' + +import csv from 'csv-parser' + +const getAllDatasets = async (workspaceId: string, page: number = -1, limit: number = -1) => { + try { + const appServer = getRunningExpressApp() + const queryBuilder = appServer.AppDataSource.getRepository(Dataset).createQueryBuilder('ds').orderBy('ds.updatedDate', 'DESC') + if (page > 0 && limit > 0) { + queryBuilder.skip((page - 1) * limit) + queryBuilder.take(limit) + } + if (workspaceId) queryBuilder.andWhere('ds.workspaceId = :workspaceId', { workspaceId }) + + const [data, total] = await queryBuilder.getManyAndCount() + + const returnObj: Dataset[] = [] + + // TODO: This is a hack to get the row count for each dataset. 
Need to find a better way to do this + for (const dataset of data) { + ;(dataset as any).rowCount = await appServer.AppDataSource.getRepository(DatasetRow).count({ + where: { datasetId: dataset.id } + }) + returnObj.push(dataset) + } + if (page > 0 && limit > 0) { + return { total, data: returnObj } + } else { + return returnObj + } + } catch (error) { + throw new InternalFlowiseError( + StatusCodes.INTERNAL_SERVER_ERROR, + `Error: datasetService.getAllDatasets - ${getErrorMessage(error)}` + ) + } +} + +const getDataset = async (id: string, workspaceId: string, page: number = -1, limit: number = -1) => { + try { + const appServer = getRunningExpressApp() + const dataset = await appServer.AppDataSource.getRepository(Dataset).findOneBy({ + id: id, + workspaceId: workspaceId + }) + const queryBuilder = appServer.AppDataSource.getRepository(DatasetRow).createQueryBuilder('dsr').orderBy('dsr.sequenceNo', 'ASC') + queryBuilder.andWhere('dsr.datasetId = :datasetId', { datasetId: id }) + if (page > 0 && limit > 0) { + queryBuilder.skip((page - 1) * limit) + queryBuilder.take(limit) + } + let [data, total] = await queryBuilder.getManyAndCount() + // special case for sequence numbers == -1 (this happens when the update script is run and all rows are set to -1) + // check if there are any sequence numbers == -1, if so set them to the max sequence number + 1 + const missingSequenceNumbers = data.filter((item) => item.sequenceNo === -1) + if (missingSequenceNumbers.length > 0) { + const maxSequenceNumber = data.reduce((prev, current) => (prev.sequenceNo > current.sequenceNo ? 
prev : current)) + let sequenceNo = maxSequenceNumber.sequenceNo + 1 + for (const zeroSequenceNumber of missingSequenceNumbers) { + zeroSequenceNumber.sequenceNo = sequenceNo++ + } + await appServer.AppDataSource.getRepository(DatasetRow).save(missingSequenceNumbers) + // now get the items again + const queryBuilder2 = appServer.AppDataSource.getRepository(DatasetRow) + .createQueryBuilder('dsr') + .orderBy('dsr.sequenceNo', 'ASC') + queryBuilder2.andWhere('dsr.datasetId = :datasetId', { datasetId: id }) + if (page > 0 && limit > 0) { + queryBuilder2.skip((page - 1) * limit) + queryBuilder2.take(limit) + } + ;[data, total] = await queryBuilder2.getManyAndCount() + } + + return { + ...dataset, + rows: data, + total + } + } catch (error) { + throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `Error: datasetService.getDataset - ${getErrorMessage(error)}`) + } +} + +const reorderDatasetRow = async (datasetId: string, rows: any[], workspaceId: string) => { + try { + const appServer = getRunningExpressApp() + await appServer.AppDataSource.transaction(async (entityManager) => { + // rows are an array of { id: string, sequenceNo: number } + // update the sequence numbers in the DB + for (const row of rows) { + const item = await entityManager.getRepository(DatasetRow).findOneBy({ + id: row.id + }) + if (!item) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Dataset Row ${row.id} not found`) + item.sequenceNo = row.sequenceNo + await entityManager.getRepository(DatasetRow).save(item) + } + await changeUpdateOnDataset(datasetId, workspaceId, entityManager) + }) + return { message: 'Dataset row reordered successfully' } + } catch (error) { + throw new InternalFlowiseError( + StatusCodes.INTERNAL_SERVER_ERROR, + `Error: datasetService.reorderDatasetRow - ${getErrorMessage(error)}` + ) + } +} + +const _readCSV = async (stream: Readable, results: any[]) => { + return new Promise((resolve, reject) => { + stream + .pipe( + csv({ + headers: false + }) + ) + 
.on('data', (data) => results.push(data)) + .on('end', () => { + resolve(results) + }) + .on('error', reject) + }) +} + +const _csvToDatasetRows = async (datasetId: string, csvString: string, firstRowHeaders: boolean) => { + try { + const appServer = getRunningExpressApp() + // get the max value first + const maxValueEntity = await appServer.AppDataSource.getRepository(DatasetRow).find({ + order: { + sequenceNo: 'DESC' + }, + take: 1 + }) + let sequenceNo = 0 + if (maxValueEntity && maxValueEntity.length > 0) { + sequenceNo = maxValueEntity[0].sequenceNo + } + sequenceNo++ + // Array to hold parsed records + const results: any[] = [] + let files: string[] = [] + + if (csvString.startsWith('[') && csvString.endsWith(']')) { + files = JSON.parse(csvString) + } else { + files = [csvString] + } + + for (const file of files) { + const splitDataURI = file.split(',') + splitDataURI.pop() + const bf = Buffer.from(splitDataURI.pop() || '', 'base64') + const csvString = bf.toString('utf8') + + // Convert CSV string to a Readable stream + const stream = Readable.from(csvString) + const rows: any[] = [] + await _readCSV(stream, rows) + results.push(...rows) + } + if (results && results?.length > 0) { + for (let r = 0; r < results.length; r++) { + const row = results[r] + let input = '' + let output = '' + if (firstRowHeaders && r === 0) { + continue + } + input = row['0'] + output = row['1'] + const newRow = appServer.AppDataSource.getRepository(DatasetRow).create(new DatasetRow()) + newRow.datasetId = datasetId + newRow.input = input + newRow.output = output + newRow.sequenceNo = sequenceNo + await appServer.AppDataSource.getRepository(DatasetRow).save(newRow) + sequenceNo++ + } + } + } catch (error) { + throw new InternalFlowiseError( + StatusCodes.INTERNAL_SERVER_ERROR, + `Error: datasetService._csvToDatasetRows - ${getErrorMessage(error)}` + ) + } +} + +// Create new dataset +const createDataset = async (body: any) => { + try { + const appServer = getRunningExpressApp() + 
const newDs = new Dataset() + Object.assign(newDs, body) + const dataset = appServer.AppDataSource.getRepository(Dataset).create(newDs) + const result = await appServer.AppDataSource.getRepository(Dataset).save(dataset) + if (body.csvFile) { + await _csvToDatasetRows(result.id, body.csvFile, body.firstRowHeaders) + } + return result + } catch (error) { + throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `Error: datasetService.createDataset - ${getErrorMessage(error)}`) + } +} + +// Update dataset +const updateDataset = async (id: string, body: any, workspaceId: string) => { + try { + const appServer = getRunningExpressApp() + const dataset = await appServer.AppDataSource.getRepository(Dataset).findOneBy({ + id: id, + workspaceId: workspaceId + }) + if (!dataset) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Dataset ${id} not found`) + + const updateDataset = new Dataset() + Object.assign(updateDataset, body) + appServer.AppDataSource.getRepository(Dataset).merge(dataset, updateDataset) + const result = await appServer.AppDataSource.getRepository(Dataset).save(dataset) + return result + } catch (error) { + throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `Error: datasetService.updateDataset - ${getErrorMessage(error)}`) + } +} + +// Delete dataset via id +const deleteDataset = async (id: string, workspaceId: string) => { + try { + const appServer = getRunningExpressApp() + const result = await appServer.AppDataSource.getRepository(Dataset).delete({ id: id, workspaceId: workspaceId }) + + // delete all rows for this dataset + await appServer.AppDataSource.getRepository(DatasetRow).delete({ datasetId: id }) + + return result + } catch (error) { + throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `Error: datasetService.deleteDataset - ${getErrorMessage(error)}`) + } +} + +// Create new row in a given dataset +const addDatasetRow = async (body: any) => { + try { + const appServer = getRunningExpressApp() + if 
(body.csvFile) { + await _csvToDatasetRows(body.datasetId, body.csvFile, body.firstRowHeaders) + await changeUpdateOnDataset(body.datasetId, body.workspaceId) + return { message: 'Dataset rows added successfully' } + } else { + // get the max value first + const maxValueEntity = await appServer.AppDataSource.getRepository(DatasetRow).find({ + where: { + datasetId: body.datasetId + }, + order: { + sequenceNo: 'DESC' + }, + take: 1 + }) + let sequenceNo = 0 + if (maxValueEntity && maxValueEntity.length > 0) { + sequenceNo = maxValueEntity[0].sequenceNo + } + const newDs = new DatasetRow() + Object.assign(newDs, body) + newDs.sequenceNo = sequenceNo === 0 ? sequenceNo : sequenceNo + 1 + const row = appServer.AppDataSource.getRepository(DatasetRow).create(newDs) + const result = await appServer.AppDataSource.getRepository(DatasetRow).save(row) + await changeUpdateOnDataset(body.datasetId, body.workspaceId) + return result + } + } catch (error) { + throw new InternalFlowiseError( + StatusCodes.INTERNAL_SERVER_ERROR, + `Error: datasetService.createDatasetRow - ${getErrorMessage(error)}` + ) + } +} + +const changeUpdateOnDataset = async (id: string, workspaceId: string, entityManager?: any) => { + const appServer = getRunningExpressApp() + const dataset = await appServer.AppDataSource.getRepository(Dataset).findOneBy({ + id: id, + workspaceId: workspaceId + }) + if (!dataset) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Dataset ${id} not found`) + + dataset.updatedDate = new Date() + if (entityManager) { + await entityManager.getRepository(Dataset).save(dataset) + } else { + await appServer.AppDataSource.getRepository(Dataset).save(dataset) + } +} + +// Update row for a dataset +const updateDatasetRow = async (id: string, body: any) => { + try { + const appServer = getRunningExpressApp() + const item = await appServer.AppDataSource.getRepository(DatasetRow).findOneBy({ + id: id + }) + if (!item) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Dataset Row 
${id} not found`) + + const updateItem = new DatasetRow() + Object.assign(updateItem, body) + appServer.AppDataSource.getRepository(DatasetRow).merge(item, updateItem) + const result = await appServer.AppDataSource.getRepository(DatasetRow).save(item) + await changeUpdateOnDataset(body.datasetId, body.workspaceId) + return result + } catch (error) { + throw new InternalFlowiseError( + StatusCodes.INTERNAL_SERVER_ERROR, + `Error: datasetService.updateDatasetRow - ${getErrorMessage(error)}` + ) + } +} + +// Delete dataset row via id +const deleteDatasetRow = async (id: string, workspaceId: string) => { + try { + const appServer = getRunningExpressApp() + return await appServer.AppDataSource.transaction(async (entityManager) => { + const item = await entityManager.getRepository(DatasetRow).findOneBy({ + id: id + }) + if (!item) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Dataset Row ${id} not found`) + + const result = await entityManager.getRepository(DatasetRow).delete({ id: id }) + await changeUpdateOnDataset(item.datasetId, workspaceId, entityManager) + return result + }) + } catch (error) { + throw new InternalFlowiseError( + StatusCodes.INTERNAL_SERVER_ERROR, + `Error: datasetService.deleteDatasetRow - ${getErrorMessage(error)}` + ) + } +} + +// Delete dataset rows via ids +const patchDeleteRows = async (ids: string[] = [], workspaceId: string) => { + try { + const appServer = getRunningExpressApp() + const datasetItemsToBeDeleted = await appServer.AppDataSource.getRepository(DatasetRow).find({ + where: { + id: In(ids) + } + }) + const dbResponse = await appServer.AppDataSource.getRepository(DatasetRow).delete(ids) + + const datasetIds = [...new Set(datasetItemsToBeDeleted.map((item) => item.datasetId))] + for (const datasetId of datasetIds) { + await changeUpdateOnDataset(datasetId, workspaceId) + } + return dbResponse + } catch (error) { + throw new InternalFlowiseError( + StatusCodes.INTERNAL_SERVER_ERROR, + `Error: datasetService.patchDeleteRows - 
${getErrorMessage(error)}` + ) + } +} + +export default { + getAllDatasets, + getDataset, + createDataset, + updateDataset, + deleteDataset, + addDatasetRow, + updateDatasetRow, + deleteDatasetRow, + patchDeleteRows, + reorderDatasetRow +} diff --git a/packages/server/src/services/documentstore/index.ts b/packages/server/src/services/documentstore/index.ts index adea69bae..0ee1cad20 100644 --- a/packages/server/src/services/documentstore/index.ts +++ b/packages/server/src/services/documentstore/index.ts @@ -1,6 +1,4 @@ -import { getRunningExpressApp } from '../../utils/getRunningExpressApp' -import { DocumentStore } from '../../database/entities/DocumentStore' -import * as path from 'path' +import { Document } from '@langchain/core/documents' import { addArrayFilesToStorage, addSingleFileToStorage, @@ -14,9 +12,15 @@ import { removeSpecificFileFromStorage, removeSpecificFileFromUpload } from 'flowise-components' +import { StatusCodes } from 'http-status-codes' +import { cloneDeep, omit } from 'lodash' +import * as path from 'path' +import { DataSource, In } from 'typeorm' +import { v4 as uuidv4 } from 'uuid' import { addLoaderSource, ChatType, + DocumentStoreDTO, DocumentStoreStatus, IComponentNodes, IDocumentStoreFileChunkPagedResponse, @@ -27,38 +31,43 @@ import { IDocumentStoreUpsertData, IDocumentStoreWhereUsed, IExecuteDocStoreUpsert, + IExecutePreviewLoader, IExecuteProcessLoader, IExecuteVectorStoreInsert, INodeData, - MODE, IOverrideConfig, - IExecutePreviewLoader, - DocumentStoreDTO + MODE } from '../../Interface' -import { DocumentStoreFileChunk } from '../../database/entities/DocumentStoreFileChunk' -import { v4 as uuidv4 } from 'uuid' -import { databaseEntities, getAppVersion, saveUpsertFlowData } from '../../utils' -import logger from '../../utils/logger' -import nodesService from '../nodes' -import { InternalFlowiseError } from '../../errors/internalFlowiseError' -import { StatusCodes } from 'http-status-codes' -import { getErrorMessage } from 
'../../errors/utils' +import { UsageCacheManager } from '../../UsageCacheManager' import { ChatFlow } from '../../database/entities/ChatFlow' -import { Document } from '@langchain/core/documents' +import { DocumentStore } from '../../database/entities/DocumentStore' +import { DocumentStoreFileChunk } from '../../database/entities/DocumentStoreFileChunk' import { UpsertHistory } from '../../database/entities/UpsertHistory' -import { cloneDeep, omit } from 'lodash' +import { getWorkspaceSearchOptions } from '../../enterprise/utils/ControllerServiceUtils' +import { InternalFlowiseError } from '../../errors/internalFlowiseError' +import { getErrorMessage } from '../../errors/utils' +import { databaseEntities, getAppVersion, saveUpsertFlowData } from '../../utils' +import { DOCUMENT_STORE_BASE_FOLDER, INPUT_PARAMS_TYPE, OMIT_QUEUE_JOB_DATA } from '../../utils/constants' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' +import logger from '../../utils/logger' import { DOCUMENTSTORE_TOOL_DESCRIPTION_PROMPT_GENERATOR } from '../../utils/prompt' -import { DataSource } from 'typeorm' +import { checkStorage, updateStorageUsage } from '../../utils/quotaUsage' import { Telemetry } from '../../utils/telemetry' -import { INPUT_PARAMS_TYPE, OMIT_QUEUE_JOB_DATA } from '../../utils/constants' +import nodesService from '../nodes' -const DOCUMENT_STORE_BASE_FOLDER = 'docustore' - -const createDocumentStore = async (newDocumentStore: DocumentStore) => { +const createDocumentStore = async (newDocumentStore: DocumentStore, orgId: string) => { try { const appServer = getRunningExpressApp() + const documentStore = appServer.AppDataSource.getRepository(DocumentStore).create(newDocumentStore) const dbResponse = await appServer.AppDataSource.getRepository(DocumentStore).save(documentStore) + await appServer.telemetry.sendTelemetry( + 'document_store_created', + { + version: await getAppVersion() + }, + orgId + ) return dbResponse } catch (error) { throw new 
InternalFlowiseError( @@ -68,11 +77,26 @@ const createDocumentStore = async (newDocumentStore: DocumentStore) => { } } -const getAllDocumentStores = async () => { +const getAllDocumentStores = async (workspaceId: string, page: number = -1, limit: number = -1) => { try { const appServer = getRunningExpressApp() - const entities = await appServer.AppDataSource.getRepository(DocumentStore).find() - return entities + const queryBuilder = appServer.AppDataSource.getRepository(DocumentStore) + .createQueryBuilder('doc_store') + .orderBy('doc_store.updatedDate', 'DESC') + + if (page > 0 && limit > 0) { + queryBuilder.skip((page - 1) * limit) + queryBuilder.take(limit) + } + queryBuilder.andWhere('doc_store.workspaceId = :workspaceId', { workspaceId }) + + const [data, total] = await queryBuilder.getManyAndCount() + + if (page > 0 && limit > 0) { + return { data, total } + } else { + return data + } } catch (error) { throw new InternalFlowiseError( StatusCodes.INTERNAL_SERVER_ERROR, @@ -81,22 +105,21 @@ const getAllDocumentStores = async () => { } } -const getAllDocumentFileChunks = async () => { - try { - const appServer = getRunningExpressApp() - const entities = await appServer.AppDataSource.getRepository(DocumentStoreFileChunk).find() - return entities - } catch (error) { - throw new InternalFlowiseError( - StatusCodes.INTERNAL_SERVER_ERROR, - `Error: documentStoreServices.getAllDocumentFileChunks - ${getErrorMessage(error)}` - ) - } +const getAllDocumentFileChunksByDocumentStoreIds = async (documentStoreIds: string[]) => { + const appServer = getRunningExpressApp() + return await appServer.AppDataSource.getRepository(DocumentStoreFileChunk).find({ where: { storeId: In(documentStoreIds) } }) } -const deleteLoaderFromDocumentStore = async (storeId: string, docId: string) => { +const deleteLoaderFromDocumentStore = async ( + storeId: string, + docId: string, + orgId: string, + workspaceId: string, + usageCacheManager: UsageCacheManager +) => { try { const appServer = 
getRunningExpressApp() + const entity = await appServer.AppDataSource.getRepository(DocumentStore).findOneBy({ id: storeId }) @@ -106,6 +129,13 @@ const deleteLoaderFromDocumentStore = async (storeId: string, docId: string) => `Error: documentStoreServices.deleteLoaderFromDocumentStore - Document store ${storeId} not found` ) } + + if (workspaceId) { + if (entity?.workspaceId !== workspaceId) { + throw new Error('Unauthorized access') + } + } + const existingLoaders = JSON.parse(entity.loaders) const found = existingLoaders.find((loader: IDocumentStoreLoader) => loader.id === docId) if (found) { @@ -113,7 +143,8 @@ const deleteLoaderFromDocumentStore = async (storeId: string, docId: string) => for (const file of found.files) { if (file.name) { try { - await removeSpecificFileFromStorage(DOCUMENT_STORE_BASE_FOLDER, storeId, file.name) + const { totalSize } = await removeSpecificFileFromStorage(orgId, DOCUMENT_STORE_BASE_FOLDER, storeId, file.name) + await updateStorageUsage(orgId, workspaceId, totalSize, usageCacheManager) } catch (error) { console.error(error) } @@ -141,11 +172,12 @@ const deleteLoaderFromDocumentStore = async (storeId: string, docId: string) => } } -const getDocumentStoreById = async (storeId: string) => { +const getDocumentStoreById = async (storeId: string, workspaceId: string) => { try { const appServer = getRunningExpressApp() const entity = await appServer.AppDataSource.getRepository(DocumentStore).findOneBy({ - id: storeId + id: storeId, + workspaceId: workspaceId }) if (!entity) { throw new InternalFlowiseError( @@ -162,7 +194,7 @@ const getDocumentStoreById = async (storeId: string) => { } } -const getUsedChatflowNames = async (entity: DocumentStore) => { +const getUsedChatflowNames = async (entity: DocumentStore, workspaceId: string) => { try { const appServer = getRunningExpressApp() if (entity.whereUsed) { @@ -170,7 +202,7 @@ const getUsedChatflowNames = async (entity: DocumentStore) => { const updatedWhereUsed: 
IDocumentStoreWhereUsed[] = [] for (let i = 0; i < whereUsed.length; i++) { const associatedChatflow = await appServer.AppDataSource.getRepository(ChatFlow).findOne({ - where: { id: whereUsed[i] }, + where: { id: whereUsed[i], workspaceId: workspaceId }, select: ['id', 'name'] }) if (associatedChatflow) { @@ -192,10 +224,17 @@ const getUsedChatflowNames = async (entity: DocumentStore) => { } // Get chunks for a specific loader or store -const getDocumentStoreFileChunks = async (appDataSource: DataSource, storeId: string, docId: string, pageNo: number = 1) => { +const getDocumentStoreFileChunks = async ( + appDataSource: DataSource, + storeId: string, + docId: string, + workspaceId: string, + pageNo: number = 1 +) => { try { const entity = await appDataSource.getRepository(DocumentStore).findOneBy({ - id: storeId + id: storeId, + workspaceId: workspaceId }) if (!entity) { throw new InternalFlowiseError( @@ -259,6 +298,7 @@ const getDocumentStoreFileChunks = async (appDataSource: DataSource, storeId: st currentPage: pageNo, storeName: entity.name, description: entity.description, + workspaceId: entity.workspaceId, docId: docId, characters } @@ -271,21 +311,30 @@ const getDocumentStoreFileChunks = async (appDataSource: DataSource, storeId: st } } -const deleteDocumentStore = async (storeId: string) => { +const deleteDocumentStore = async (storeId: string, orgId: string, workspaceId: string, usageCacheManager: UsageCacheManager) => { try { const appServer = getRunningExpressApp() - // delete all the chunks associated with the store - await appServer.AppDataSource.getRepository(DocumentStoreFileChunk).delete({ - storeId: storeId - }) - // now delete the files associated with the store + const entity = await appServer.AppDataSource.getRepository(DocumentStore).findOneBy({ - id: storeId + id: storeId, + workspaceId: workspaceId }) if (!entity) { throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Document store ${storeId} not found`) } - await 
removeFilesFromStorage(DOCUMENT_STORE_BASE_FOLDER, entity.id) + + // delete all the chunks associated with the store + await appServer.AppDataSource.getRepository(DocumentStoreFileChunk).delete({ + storeId: storeId + }) + + // now delete the files associated with the store + try { + const { totalSize } = await removeFilesFromStorage(orgId, DOCUMENT_STORE_BASE_FOLDER, entity.id) + await updateStorageUsage(orgId, workspaceId, totalSize, usageCacheManager) + } catch (error) { + logger.error(`[server]: Error deleting file storage for documentStore ${storeId}`) + } // delete upsert history await appServer.AppDataSource.getRepository(UpsertHistory).delete({ @@ -306,11 +355,12 @@ const deleteDocumentStore = async (storeId: string) => { } } -const deleteDocumentStoreFileChunk = async (storeId: string, docId: string, chunkId: string) => { +const deleteDocumentStoreFileChunk = async (storeId: string, docId: string, chunkId: string, workspaceId: string) => { try { const appServer = getRunningExpressApp() const entity = await appServer.AppDataSource.getRepository(DocumentStore).findOneBy({ - id: storeId + id: storeId, + workspaceId: workspaceId }) if (!entity) { throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Document store ${storeId} not found`) @@ -332,7 +382,7 @@ const deleteDocumentStoreFileChunk = async (storeId: string, docId: string, chun found.totalChars -= tbdChunk.pageContent.length entity.loaders = JSON.stringify(loaders) await appServer.AppDataSource.getRepository(DocumentStore).save(entity) - return getDocumentStoreFileChunks(appServer.AppDataSource, storeId, docId) + return getDocumentStoreFileChunks(appServer.AppDataSource, storeId, docId, workspaceId) } catch (error) { throw new InternalFlowiseError( StatusCodes.INTERNAL_SERVER_ERROR, @@ -341,13 +391,14 @@ const deleteDocumentStoreFileChunk = async (storeId: string, docId: string, chun } } -const deleteVectorStoreFromStore = async (storeId: string) => { +const deleteVectorStoreFromStore = async (storeId: 
string, workspaceId: string) => { try { const appServer = getRunningExpressApp() const componentNodes = appServer.nodesPool.componentNodes const entity = await appServer.AppDataSource.getRepository(DocumentStore).findOneBy({ - id: storeId + id: storeId, + workspaceId: workspaceId }) if (!entity) { throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Document store ${storeId} not found`) @@ -420,11 +471,19 @@ const deleteVectorStoreFromStore = async (storeId: string) => { } } -const editDocumentStoreFileChunk = async (storeId: string, docId: string, chunkId: string, content: string, metadata: ICommonObject) => { +const editDocumentStoreFileChunk = async ( + storeId: string, + docId: string, + chunkId: string, + content: string, + metadata: ICommonObject, + workspaceId: string +) => { try { const appServer = getRunningExpressApp() const entity = await appServer.AppDataSource.getRepository(DocumentStore).findOneBy({ - id: storeId + id: storeId, + workspaceId: workspaceId }) if (!entity) { throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Document store ${storeId} not found`) @@ -448,7 +507,7 @@ const editDocumentStoreFileChunk = async (storeId: string, docId: string, chunkI await appServer.AppDataSource.getRepository(DocumentStoreFileChunk).save(editChunk) entity.loaders = JSON.stringify(loaders) await appServer.AppDataSource.getRepository(DocumentStore).save(entity) - return getDocumentStoreFileChunks(appServer.AppDataSource, storeId, docId) + return getDocumentStoreFileChunks(appServer.AppDataSource, storeId, docId, workspaceId) } catch (error) { throw new InternalFlowiseError( StatusCodes.INTERNAL_SERVER_ERROR, @@ -471,7 +530,16 @@ const updateDocumentStore = async (documentStore: DocumentStore, updatedDocument } } -const _saveFileToStorage = async (fileBase64: string, entity: DocumentStore) => { +const _saveFileToStorage = async ( + fileBase64: string, + entity: DocumentStore, + orgId: string, + workspaceId: string, + subscriptionId: string, + 
usageCacheManager: UsageCacheManager +) => { + await checkStorage(orgId, subscriptionId, usageCacheManager) + const splitDataURI = fileBase64.split(',') const filename = splitDataURI.pop()?.split(':')[1] ?? '' const bf = Buffer.from(splitDataURI.pop() || '', 'base64') @@ -480,7 +548,9 @@ const _saveFileToStorage = async (fileBase64: string, entity: DocumentStore) => if (mimePrefix) { mime = mimePrefix.split(';')[0].split(':')[1] } - await addSingleFileToStorage(mime, bf, filename, DOCUMENT_STORE_BASE_FOLDER, entity.id) + const { totalSize } = await addSingleFileToStorage(mime, bf, filename, orgId, DOCUMENT_STORE_BASE_FOLDER, entity.id) + await updateStorageUsage(orgId, workspaceId, totalSize, usageCacheManager) + return { id: uuidv4(), name: filename, @@ -517,7 +587,8 @@ const _splitIntoChunks = async (appDataSource: DataSource, componentNodes: IComp chatflowid: uuidv4(), appDataSource, databaseEntities, - logger + logger, + processRaw: true } const docNodeInstance = new nodeModule.nodeClass() let docs: IDocument[] = await docNodeInstance.init(nodeData, '', options) @@ -530,7 +601,12 @@ const _splitIntoChunks = async (appDataSource: DataSource, componentNodes: IComp } } -const _normalizeFilePaths = async (appDataSource: DataSource, data: IDocumentStoreLoaderForPreview, entity: DocumentStore | null) => { +const _normalizeFilePaths = async ( + appDataSource: DataSource, + data: IDocumentStoreLoaderForPreview, + entity: DocumentStore | null, + orgId: string +) => { const keys = Object.getOwnPropertyNames(data.loaderConfig) let rehydrated = false for (let i = 0; i < keys.length; i++) { @@ -563,7 +639,7 @@ const _normalizeFilePaths = async (appDataSource: DataSource, data: IDocumentSto if (currentLoader) { const base64Files: string[] = [] for (const file of files) { - const bf = await getFileFromStorage(file, DOCUMENT_STORE_BASE_FOLDER, documentStoreEntity.id) + const bf = await getFileFromStorage(file, orgId, DOCUMENT_STORE_BASE_FOLDER, documentStoreEntity.id) // find 
the file entry that has the same name as the file const uploadedFile = currentLoader.files.find((uFile: IDocumentStoreLoaderFile) => uFile.name === file) const mimePrefix = 'data:' + uploadedFile.mimePrefix + ';base64' @@ -578,7 +654,13 @@ const _normalizeFilePaths = async (appDataSource: DataSource, data: IDocumentSto data.rehydrated = rehydrated } -const previewChunksMiddleware = async (data: IDocumentStoreLoaderForPreview) => { +const previewChunksMiddleware = async ( + data: IDocumentStoreLoaderForPreview, + orgId: string, + workspaceId: string, + subscriptionId: string, + usageCacheManager: UsageCacheManager +) => { try { const appServer = getRunningExpressApp() const appDataSource = appServer.AppDataSource @@ -587,14 +669,18 @@ const previewChunksMiddleware = async (data: IDocumentStoreLoaderForPreview) => const executeData: IExecutePreviewLoader = { appDataSource, componentNodes, + usageCacheManager, data, - isPreviewOnly: true + isPreviewOnly: true, + orgId, + workspaceId, + subscriptionId } if (process.env.MODE === MODE.QUEUE) { const upsertQueue = appServer.queueManager.getQueue('upsert') const job = await upsertQueue.addJob(omit(executeData, OMIT_QUEUE_JOB_DATA)) - logger.debug(`[server]: Job added to queue: ${job.id}`) + logger.debug(`[server]: [${orgId}]: Job added to queue: ${job.id}`) const queueEvents = upsertQueue.getQueueEvents() const result = await job.waitUntilFinished(queueEvents) @@ -614,7 +700,7 @@ const previewChunksMiddleware = async (data: IDocumentStoreLoaderForPreview) => } } -export const previewChunks = async ({ appDataSource, componentNodes, data }: IExecutePreviewLoader) => { +export const previewChunks = async ({ appDataSource, componentNodes, data, orgId }: IExecutePreviewLoader) => { try { if (data.preview) { if ( @@ -626,7 +712,7 @@ export const previewChunks = async ({ appDataSource, componentNodes, data }: IEx } } if (!data.rehydrated) { - await _normalizeFilePaths(appDataSource, data, null) + await 
_normalizeFilePaths(appDataSource, data, null, orgId) } let docs = await _splitIntoChunks(appDataSource, componentNodes, data) const totalChunks = docs.length @@ -646,10 +732,15 @@ export const previewChunks = async ({ appDataSource, componentNodes, data }: IEx } } -const saveProcessingLoader = async (appDataSource: DataSource, data: IDocumentStoreLoaderForPreview): Promise => { +const saveProcessingLoader = async ( + appDataSource: DataSource, + data: IDocumentStoreLoaderForPreview, + workspaceId: string +): Promise => { try { const entity = await appDataSource.getRepository(DocumentStore).findOneBy({ - id: data.storeId + id: data.storeId, + workspaceId: workspaceId }) if (!entity) { throw new InternalFlowiseError( @@ -726,9 +817,19 @@ const saveProcessingLoader = async (appDataSource: DataSource, data: IDocumentSt } } -export const processLoader = async ({ appDataSource, componentNodes, data, docLoaderId }: IExecuteProcessLoader) => { +export const processLoader = async ({ + appDataSource, + componentNodes, + data, + docLoaderId, + orgId, + workspaceId, + subscriptionId, + usageCacheManager +}: IExecuteProcessLoader) => { const entity = await appDataSource.getRepository(DocumentStore).findOneBy({ - id: data.storeId + id: data.storeId, + workspaceId: workspaceId }) if (!entity) { throw new InternalFlowiseError( @@ -736,11 +837,29 @@ export const processLoader = async ({ appDataSource, componentNodes, data, docLo `Error: documentStoreServices.processLoader - Document store ${data.storeId} not found` ) } - await _saveChunksToStorage(appDataSource, componentNodes, data, entity, docLoaderId) - return getDocumentStoreFileChunks(appDataSource, data.storeId as string, docLoaderId) + await _saveChunksToStorage( + appDataSource, + componentNodes, + data, + entity, + docLoaderId, + orgId, + workspaceId, + subscriptionId, + usageCacheManager + ) + return getDocumentStoreFileChunks(appDataSource, data.storeId as string, docLoaderId, workspaceId) } -const 
processLoaderMiddleware = async (data: IDocumentStoreLoaderForPreview, docLoaderId: string, isInternalRequest = false) => { +const processLoaderMiddleware = async ( + data: IDocumentStoreLoaderForPreview, + docLoaderId: string, + orgId: string, + workspaceId: string, + subscriptionId: string, + usageCacheManager: UsageCacheManager, + isInternalRequest = false +) => { try { const appServer = getRunningExpressApp() const appDataSource = appServer.AppDataSource @@ -753,13 +872,17 @@ const processLoaderMiddleware = async (data: IDocumentStoreLoaderForPreview, doc data, docLoaderId, isProcessWithoutUpsert: true, - telemetry + telemetry, + orgId, + workspaceId, + subscriptionId, + usageCacheManager } if (process.env.MODE === MODE.QUEUE) { const upsertQueue = appServer.queueManager.getQueue('upsert') const job = await upsertQueue.addJob(omit(executeData, OMIT_QUEUE_JOB_DATA)) - logger.debug(`[server]: Job added to queue: ${job.id}`) + logger.debug(`[server]: [${orgId}]: Job added to queue: ${job.id}`) if (isInternalRequest) { return { @@ -790,20 +913,28 @@ const _saveChunksToStorage = async ( componentNodes: IComponentNodes, data: IDocumentStoreLoaderForPreview, entity: DocumentStore, - newLoaderId: string + newLoaderId: string, + orgId: string, + workspaceId: string, + subscriptionId: string, + usageCacheManager: UsageCacheManager ) => { const re = new RegExp('^data.*;base64', 'i') try { //step 1: restore the full paths, if any - await _normalizeFilePaths(appDataSource, data, entity) + await _normalizeFilePaths(appDataSource, data, entity, orgId) //step 2: split the file into chunks const response = await previewChunks({ appDataSource, componentNodes, data, - isPreviewOnly: false + isPreviewOnly: false, + orgId, + workspaceId, + subscriptionId, + usageCacheManager }) //step 3: remove all files associated with the loader @@ -817,7 +948,13 @@ const _saveChunksToStorage = async ( if (loader.files) { loader.files.map(async (file: IDocumentStoreLoaderFile) => { try { - await 
removeSpecificFileFromStorage(DOCUMENT_STORE_BASE_FOLDER, entity.id, file.name) + const { totalSize } = await removeSpecificFileFromStorage( + orgId, + DOCUMENT_STORE_BASE_FOLDER, + entity.id, + file.name + ) + await updateStorageUsage(orgId, workspaceId, totalSize, usageCacheManager) } catch (error) { console.error(error) } @@ -832,6 +969,7 @@ const _saveChunksToStorage = async ( const keys = Object.getOwnPropertyNames(data.loaderConfig) for (let i = 0; i < keys.length; i++) { const input = data.loaderConfig[keys[i]] + if (!input) { continue } @@ -844,15 +982,15 @@ const _saveChunksToStorage = async ( for (let j = 0; j < files.length; j++) { const file = files[j] if (re.test(file)) { - const fileMetadata = await _saveFileToStorage(file, entity) + const fileMetadata = await _saveFileToStorage(file, entity, orgId, workspaceId, subscriptionId, usageCacheManager) fileNames.push(fileMetadata.name) filesWithMetadata.push(fileMetadata) } } - if (fileNames.length) data.loaderConfig[keys[i]] = 'FILE-STORAGE::' + JSON.stringify(fileNames) + data.loaderConfig[keys[i]] = 'FILE-STORAGE::' + JSON.stringify(fileNames) } else if (re.test(input)) { const fileNames: string[] = [] - const fileMetadata = await _saveFileToStorage(input, entity) + const fileMetadata = await _saveFileToStorage(input, entity, orgId, workspaceId, subscriptionId, usageCacheManager) fileNames.push(fileMetadata.name) filesWithMetadata.push(fileMetadata) data.loaderConfig[keys[i]] = 'FILE-STORAGE::' + JSON.stringify(fileNames) @@ -881,18 +1019,27 @@ const _saveChunksToStorage = async ( } return acc }, 0) - response.chunks.map(async (chunk: IDocument, index: number) => { - const docChunk: DocumentStoreFileChunk = { - docId: newLoaderId, - storeId: data.storeId || '', - id: uuidv4(), - chunkNo: index + 1, - pageContent: chunk.pageContent, - metadata: JSON.stringify(chunk.metadata) - } - const dChunk = appDataSource.getRepository(DocumentStoreFileChunk).create(docChunk) - await 
appDataSource.getRepository(DocumentStoreFileChunk).save(dChunk) - }) + await Promise.all( + response.chunks.map(async (chunk: IDocument, index: number) => { + try { + const docChunk: DocumentStoreFileChunk = { + docId: newLoaderId, + storeId: data.storeId || '', + id: uuidv4(), + chunkNo: index + 1, + pageContent: sanitizeChunkContent(chunk.pageContent), + metadata: JSON.stringify(chunk.metadata) + } + const dChunk = appDataSource.getRepository(DocumentStoreFileChunk).create(docChunk) + await appDataSource.getRepository(DocumentStoreFileChunk).save(dChunk) + } catch (chunkError) { + throw new InternalFlowiseError( + StatusCodes.INTERNAL_SERVER_ERROR, + `Error: documentStoreServices._saveChunksToStorage - ${getErrorMessage(chunkError)}` + ) + } + }) + ) // update the loader with the new metrics loader.totalChunks = response.totalChunks loader.totalChars = totalChars @@ -915,6 +1062,12 @@ const _saveChunksToStorage = async ( } } +// remove null bytes from chunk content +const sanitizeChunkContent = (content: string) => { + // eslint-disable-next-line no-control-regex + return content.replaceAll(/\u0000/g, '') +} + // Get all component nodes const getDocumentLoaders = async () => { const removeDocumentLoadersWithName = ['documentStore', 'vectorStoreToDocument', 'unstructuredFolderLoader', 'folderFiles'] @@ -930,12 +1083,12 @@ const getDocumentLoaders = async () => { } } -const updateDocumentStoreUsage = async (chatId: string, storeId: string | undefined) => { +const updateDocumentStoreUsage = async (chatId: string, storeId: string | undefined, workspaceId?: string) => { try { // find the document store const appServer = getRunningExpressApp() // find all entities that have the chatId in their whereUsed - const entities = await appServer.AppDataSource.getRepository(DocumentStore).find() + const entities = await appServer.AppDataSource.getRepository(DocumentStore).findBy(getWorkspaceSearchOptions(workspaceId)) entities.map(async (entity: DocumentStore) => { const 
whereUsed = JSON.parse(entity.whereUsed) const found = whereUsed.find((w: string) => w === chatId) @@ -976,11 +1129,12 @@ const updateDocumentStoreUsage = async (chatId: string, storeId: string | undefi } } -const updateVectorStoreConfigOnly = async (data: ICommonObject) => { +const updateVectorStoreConfigOnly = async (data: ICommonObject, workspaceId: string) => { try { const appServer = getRunningExpressApp() const entity = await appServer.AppDataSource.getRepository(DocumentStore).findOneBy({ - id: data.storeId + id: data.storeId, + workspaceId: workspaceId }) if (!entity) { throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Document store ${data.storeId} not found`) @@ -1003,10 +1157,11 @@ const updateVectorStoreConfigOnly = async (data: ICommonObject) => { ) } } -const saveVectorStoreConfig = async (appDataSource: DataSource, data: ICommonObject, isStrictSave = true) => { +const saveVectorStoreConfig = async (appDataSource: DataSource, data: ICommonObject, isStrictSave = true, workspaceId: string) => { try { const entity = await appDataSource.getRepository(DocumentStore).findOneBy({ - id: data.storeId + id: data.storeId, + workspaceId: workspaceId }) if (!entity) { throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Document store ${data.storeId} not found`) @@ -1071,14 +1226,24 @@ export const insertIntoVectorStore = async ({ componentNodes, telemetry, data, - isStrictSave + isStrictSave, + orgId, + workspaceId }: IExecuteVectorStoreInsert) => { try { - const entity = await saveVectorStoreConfig(appDataSource, data, isStrictSave) + const entity = await saveVectorStoreConfig(appDataSource, data, isStrictSave, workspaceId) entity.status = DocumentStoreStatus.UPSERTING await appDataSource.getRepository(DocumentStore).save(entity) - const indexResult = await _insertIntoVectorStoreWorkerThread(appDataSource, componentNodes, telemetry, data, isStrictSave) + const indexResult = await _insertIntoVectorStoreWorkerThread( + appDataSource, + componentNodes, + 
telemetry, + data, + isStrictSave, + orgId, + workspaceId + ) return indexResult } catch (error) { throw new InternalFlowiseError( @@ -1088,7 +1253,14 @@ export const insertIntoVectorStore = async ({ } } -const insertIntoVectorStoreMiddleware = async (data: ICommonObject, isStrictSave = true) => { +const insertIntoVectorStoreMiddleware = async ( + data: ICommonObject, + isStrictSave = true, + orgId: string, + workspaceId: string, + subscriptionId: string, + usageCacheManager: UsageCacheManager +) => { try { const appServer = getRunningExpressApp() const appDataSource = appServer.AppDataSource @@ -1101,13 +1273,17 @@ const insertIntoVectorStoreMiddleware = async (data: ICommonObject, isStrictSave telemetry, data, isStrictSave, - isVectorStoreInsert: true + isVectorStoreInsert: true, + orgId, + workspaceId, + subscriptionId, + usageCacheManager } if (process.env.MODE === MODE.QUEUE) { const upsertQueue = appServer.queueManager.getQueue('upsert') const job = await upsertQueue.addJob(omit(executeData, OMIT_QUEUE_JOB_DATA)) - logger.debug(`[server]: Job added to queue: ${job.id}`) + logger.debug(`[server]: [${orgId}]: Job added to queue: ${job.id}`) const queueEvents = upsertQueue.getQueueEvents() const result = await job.waitUntilFinished(queueEvents) @@ -1132,10 +1308,12 @@ const _insertIntoVectorStoreWorkerThread = async ( componentNodes: IComponentNodes, telemetry: Telemetry, data: ICommonObject, - isStrictSave = true + isStrictSave = true, + orgId: string, + workspaceId: string ) => { try { - const entity = await saveVectorStoreConfig(appDataSource, data, isStrictSave) + const entity = await saveVectorStoreConfig(appDataSource, data, isStrictSave, workspaceId) let upsertHistory: Record = {} const chatflowid = data.storeId // fake chatflowid because this is not tied to any chatflow @@ -1193,12 +1371,16 @@ const _insertIntoVectorStoreWorkerThread = async ( await appDataSource.getRepository(UpsertHistory).save(upsertHistoryItem) } - await 
telemetry.sendTelemetry('vector_upserted', { - version: await getAppVersion(), - chatlowId: chatflowid, - type: ChatType.INTERNAL, - flowGraph: omit(indexResult['result'], ['totalKeys', 'addedDocs']) - }) + await telemetry.sendTelemetry( + 'vector_upserted', + { + version: await getAppVersion(), + chatlowId: chatflowid, + type: ChatType.INTERNAL, + flowGraph: omit(indexResult['result'], ['totalKeys', 'addedDocs']) + }, + orgId + ) entity.status = DocumentStoreStatus.UPSERTED await appDataSource.getRepository(DocumentStore).save(entity) @@ -1459,7 +1641,11 @@ const upsertDocStore = async ( storeId: string, data: IDocumentStoreUpsertData, files: Express.Multer.File[] = [], - isRefreshExisting = false + isRefreshExisting = false, + orgId: string, + workspaceId: string, + subscriptionId: string, + usageCacheManager: UsageCacheManager ) => { const docId = data.docId let metadata = {} @@ -1470,8 +1656,12 @@ const upsertDocStore = async ( throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, `Error: Invalid metadata`) } } - const replaceExisting = data.replaceExisting ?? false - const createNewDocStore = data.createNewDocStore ?? false + const replaceExisting = + typeof data.replaceExisting === 'string' ? (data.replaceExisting as string).toLowerCase() === 'true' : data.replaceExisting ?? false + const createNewDocStore = + typeof data.createNewDocStore === 'string' + ? (data.createNewDocStore as string).toLowerCase() === 'true' + : data.createNewDocStore ?? false const newLoader = typeof data.loader === 'string' ? JSON.parse(data.loader) : data.loader const newSplitter = typeof data.splitter === 'string' ? JSON.parse(data.splitter) : data.splitter const newVectorStore = typeof data.vectorStore === 'string' ? 
JSON.parse(data.vectorStore) : data.vectorStore @@ -1506,6 +1696,13 @@ const upsertDocStore = async ( if (!entity) { throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Document store ${storeId} not found`) } + + if (workspaceId) { + if (entity?.workspaceId !== workspaceId) { + throw new Error('Unauthorized access') + } + } + const loaders = JSON.parse(entity.loaders) const loader = loaders.find((ldr: IDocumentStoreLoader) => ldr.id === docId) if (!loader) { @@ -1558,6 +1755,11 @@ const upsertDocStore = async ( ...newLoader?.config } + // Override loaderName if it's provided directly in data + if (data.loaderName) { + loaderName = data.loaderName + } + splitterName = newSplitter?.name ? getComponentLabelFromName(newSplitter?.name) : splitterName splitterId = newSplitter?.name || splitterId splitterConfig = { @@ -1593,7 +1795,17 @@ const upsertDocStore = async ( file.originalname = Buffer.from(file.originalname, 'latin1').toString('utf8') try { - await addArrayFilesToStorage(file.mimetype, fileBuffer, file.originalname, fileNames, DOCUMENT_STORE_BASE_FOLDER, storeId) + checkStorage(orgId, subscriptionId, usageCacheManager) + const { totalSize } = await addArrayFilesToStorage( + file.mimetype, + fileBuffer, + file.originalname, + fileNames, + orgId, + DOCUMENT_STORE_BASE_FOLDER, + storeId + ) + await updateStorageUsage(orgId, workspaceId, totalSize, usageCacheManager) } catch (error) { continue } @@ -1673,14 +1885,18 @@ const upsertDocStore = async ( } try { - const newLoader = await saveProcessingLoader(appDataSource, processData) + const newLoader = await saveProcessingLoader(appDataSource, processData, workspaceId) const result = await processLoader({ appDataSource, componentNodes, data: processData, docLoaderId: newLoader.id || '', isProcessWithoutUpsert: false, - telemetry + telemetry, + orgId, + workspaceId, + subscriptionId, + usageCacheManager }) const newDocId = result.docId @@ -1701,10 +1917,13 @@ const upsertDocStore = async ( telemetry, data: 
insertData, isStrictSave: false, - isVectorStoreInsert: true + isVectorStoreInsert: true, + orgId, + workspaceId, + subscriptionId, + usageCacheManager }) res.docId = newDocId - if (createNewDocStore) res.storeId = storeId return res } catch (error) { @@ -1722,17 +1941,41 @@ export const executeDocStoreUpsert = async ({ storeId, totalItems, files, - isRefreshAPI + isRefreshAPI, + orgId, + workspaceId, + subscriptionId, + usageCacheManager }: IExecuteDocStoreUpsert) => { const results = [] for (const item of totalItems) { - const res = await upsertDocStore(appDataSource, componentNodes, telemetry, storeId, item, files, isRefreshAPI) + const res = await upsertDocStore( + appDataSource, + componentNodes, + telemetry, + storeId, + item, + files, + isRefreshAPI, + orgId, + workspaceId, + subscriptionId, + usageCacheManager + ) results.push(res) } return isRefreshAPI ? results : results[0] } -const upsertDocStoreMiddleware = async (storeId: string, data: IDocumentStoreUpsertData, files: Express.Multer.File[] = []) => { +const upsertDocStoreMiddleware = async ( + storeId: string, + data: IDocumentStoreUpsertData, + files: Express.Multer.File[] = [], + orgId: string, + workspaceId: string, + subscriptionId: string, + usageCacheManager: UsageCacheManager +) => { const appServer = getRunningExpressApp() const componentNodes = appServer.nodesPool.componentNodes const appDataSource = appServer.AppDataSource @@ -1746,13 +1989,17 @@ const upsertDocStoreMiddleware = async (storeId: string, data: IDocumentStoreUps storeId, totalItems: [data], files, - isRefreshAPI: false + isRefreshAPI: false, + orgId, + workspaceId, + subscriptionId, + usageCacheManager } if (process.env.MODE === MODE.QUEUE) { const upsertQueue = appServer.queueManager.getQueue('upsert') const job = await upsertQueue.addJob(omit(executeData, OMIT_QUEUE_JOB_DATA)) - logger.debug(`[server]: Job added to queue: ${job.id}`) + logger.debug(`[server]: [${orgId}]: Job added to queue: ${job.id}`) const queueEvents = 
upsertQueue.getQueueEvents() const result = await job.waitUntilFinished(queueEvents) @@ -1772,7 +2019,14 @@ const upsertDocStoreMiddleware = async (storeId: string, data: IDocumentStoreUps } } -const refreshDocStoreMiddleware = async (storeId: string, data?: IDocumentStoreRefreshData) => { +const refreshDocStoreMiddleware = async ( + storeId: string, + data: IDocumentStoreRefreshData, + orgId: string, + workspaceId: string, + subscriptionId: string, + usageCacheManager: UsageCacheManager +) => { const appServer = getRunningExpressApp() const componentNodes = appServer.nodesPool.componentNodes const appDataSource = appServer.AppDataSource @@ -1787,6 +2041,12 @@ const refreshDocStoreMiddleware = async (storeId: string, data?: IDocumentStoreR throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Document store ${storeId} not found`) } + if (workspaceId) { + if (entity?.workspaceId !== workspaceId) { + throw new Error('Unauthorized access') + } + } + const loaders = JSON.parse(entity.loaders) totalItems = loaders.map((ldr: IDocumentStoreLoader) => { return { @@ -1804,13 +2064,17 @@ const refreshDocStoreMiddleware = async (storeId: string, data?: IDocumentStoreR storeId, totalItems, files: [], - isRefreshAPI: true + isRefreshAPI: true, + orgId, + workspaceId, + subscriptionId, + usageCacheManager } if (process.env.MODE === MODE.QUEUE) { const upsertQueue = appServer.queueManager.getQueue('upsert') const job = await upsertQueue.addJob(omit(executeData, OMIT_QUEUE_JOB_DATA)) - logger.debug(`[server]: Job added to queue: ${job.id}`) + logger.debug(`[server]: [${orgId}]: Job added to queue: ${job.id}`) const queueEvents = upsertQueue.getQueueEvents() const result = await job.waitUntilFinished(queueEvents) @@ -2033,7 +2297,7 @@ export default { createDocumentStore, deleteLoaderFromDocumentStore, getAllDocumentStores, - getAllDocumentFileChunks, + getAllDocumentFileChunksByDocumentStoreIds, getDocumentStoreById, getUsedChatflowNames, getDocumentStoreFileChunks, diff --git 
a/packages/server/src/services/evaluations/CostCalculator.ts b/packages/server/src/services/evaluations/CostCalculator.ts new file mode 100644 index 000000000..4013706ef --- /dev/null +++ b/packages/server/src/services/evaluations/CostCalculator.ts @@ -0,0 +1,57 @@ +import { ICommonObject } from 'flowise-components' + +// fractionDigits is the number of digits after the decimal point, for display purposes +const fractionDigits = 2 +// This function calculates the cost of the tokens from a metrics array +export const calculateCost = (metricsArray: ICommonObject[]) => { + for (let i = 0; i < metricsArray.length; i++) { + const metric = metricsArray[i] + const model = metric.model + if (!model) { + continue + } + const completionTokens = metric.completionTokens + const promptTokens = metric.promptTokens + const totalTokens = metric.totalTokens + + let promptTokensCost: string = '0' + let completionTokensCost: string = '0' + let totalTokensCost = '0' + if (metric.cost_values) { + let costValues: any = {} + if (metric.cost_values?.cost_values) { + costValues = metric.cost_values.cost_values + } else { + costValues = metric.cost_values + } + + if (costValues.total_price > 0) { + let cost = costValues.total_cost * (totalTokens / 1000) + totalTokensCost = formatCost(cost) + } else { + let totalCost = 0 + if (promptTokens) { + const cost = costValues.input_cost * (promptTokens / 1000) + totalCost += cost + promptTokensCost = formatCost(cost) + } + if (completionTokens) { + const cost = costValues.output_cost * (completionTokens / 1000) + totalCost += cost + completionTokensCost = formatCost(cost) + } + totalTokensCost = formatCost(totalCost) + } + } + metric['totalCost'] = totalTokensCost + metric['promptCost'] = promptTokensCost + metric['completionCost'] = completionTokensCost + } +} + +export const formatCost = (cost: number) => { + if (cost == 0) { + return '$ 0' + } + return cost < 0.01 ? 
'$ <0.01' : '$ ' + cost.toFixed(fractionDigits) +} diff --git a/packages/server/src/services/evaluations/EvaluatorRunner.ts b/packages/server/src/services/evaluations/EvaluatorRunner.ts new file mode 100644 index 000000000..3f2a42081 --- /dev/null +++ b/packages/server/src/services/evaluations/EvaluatorRunner.ts @@ -0,0 +1,215 @@ +import evaluatorsService from '../evaluator' +import { ICommonObject } from 'flowise-components' + +interface EvaluatorReturnType { + name: string + type?: string + operator?: string + measure?: string + value?: string + result: 'Pass' | 'Fail' | 'Error' +} + +export const runAdditionalEvaluators = async ( + metricsArray: ICommonObject[], + actualOutputArray: string[], + errorArray: string[], + selectedEvaluators: string[], + workspaceId: string +) => { + const evaluationResults: any[] = [] + const evaluatorDict: any = {} + + for (let j = 0; j < actualOutputArray.length; j++) { + const subArray: EvaluatorReturnType[] = [] + const actualOutput = actualOutputArray[j].toLowerCase().trim() + + for (let i = 0; i < selectedEvaluators.length; i++) { + const evaluatorId = selectedEvaluators[i] + let evaluator = evaluatorDict[evaluatorId] + if (!evaluator) { + evaluator = await evaluatorsService.getEvaluator(evaluatorId, workspaceId) + evaluatorDict[evaluatorId] = evaluator + } + + // iterate through each actual output and run the evaluator + const returnFields: EvaluatorReturnType = { + ...evaluator + } + if (errorArray[j]) { + // if this output is an error, skip over the evaluators. + subArray.push({ + ...returnFields, + result: 'Error' + }) + continue + } + try { + if (evaluator.type === 'numeric') { + const metric = metricsArray[j] + const metricValue = metric[evaluator.measure] + + subArray.push({ + ...returnFields, + result: evaluateExpression( + evaluator.measure !== 'responseLength' ? metricValue : actualOutput.length, + evaluator.operator, + evaluator.value + ) + ? 
'Pass' + : 'Fail' + }) + } + if (evaluator.type === 'json') { + const operator = evaluator.operator + let passed = false + if (operator === 'IsValidJSON') { + try { + passed = JSON.parse(actualOutput) !== undefined + } catch (error) { + passed = false + } + } else if (operator === 'IsNotValidJSON') { + try { + JSON.parse(actualOutput) + passed = false + } catch (error) { + passed = true + } + } + subArray.push({ + ...returnFields, + result: passed ? 'Pass' : 'Fail' + }) + } + if (evaluator.type === 'text') { + const operator = evaluator.operator + const value = evaluator.value.toLowerCase().trim() as string + let splitValues = [] + let passed = false + switch (operator) { + case 'NotStartsWith': + subArray.push({ + ...returnFields, + result: actualOutput.startsWith(value) ? 'Fail' : 'Pass' + }) + break + case 'StartsWith': + subArray.push({ + ...returnFields, + result: actualOutput.startsWith(value) ? 'Pass' : 'Fail' + }) + break + case 'ContainsAny': + passed = false + splitValues = value.split(',').map((v) => v.trim().toLowerCase()) // Split, trim, and convert to lowercase + for (let i = 0; i < splitValues.length; i++) { + if (actualOutput.includes(splitValues[i])) { + passed = true + break + } + } + subArray.push({ + ...returnFields, + result: passed ? 'Pass' : 'Fail' + }) + break + case 'ContainsAll': + passed = true + splitValues = value.split(',').map((v) => v.trim().toLowerCase()) // Split, trim, and convert to lowercase + for (let i = 0; i < splitValues.length; i++) { + if (!actualOutput.includes(splitValues[i])) { + passed = false + break + } + } + subArray.push({ + ...returnFields, + result: passed ? 
'Pass' : 'Fail' + }) + break + case 'DoesNotContainAny': + passed = true + splitValues = value.split(',').map((v) => v.trim().toLowerCase()) // Split, trim, and convert to lowercase + for (let i = 0; i < splitValues.length; i++) { + if (actualOutput.includes(splitValues[i])) { + passed = false + break + } + } + subArray.push({ + ...returnFields, + result: passed ? 'Fail' : 'Pass' + }) + break + case 'DoesNotContainAll': + passed = true + splitValues = value.split(',').map((v) => v.trim().toLowerCase()) // Split, trim, and convert to lowercase + for (let i = 0; i < splitValues.length; i++) { + if (actualOutput.includes(splitValues[i])) { + passed = false + break + } + } + subArray.push({ + ...returnFields, + result: passed ? 'Pass' : 'Fail' + }) + break + } + } + } catch (error) { + subArray.push({ + name: evaluator?.name || 'Missing Evaluator', + result: 'Error' + }) + } + } + evaluationResults.push(subArray) + } + // iterate through the array of evaluation results and count the number of passes and fails using the result key + let passCount = 0 + let failCount = 0 + let errorCount = 0 + for (let i = 0; i < evaluationResults.length; i++) { + const subArray = evaluationResults[i] + for (let j = 0; j < subArray.length; j++) { + if (subArray[j].result === 'Pass') { + passCount++ + } else if (subArray[j].result === 'Fail') { + failCount++ + } else if (subArray[j].result === 'Error') { + errorCount++ + } + delete subArray[j].createdDate + delete subArray[j].updatedDate + } + } + return { + results: evaluationResults, + evaluatorMetrics: { + passCount, + failCount, + errorCount + } + } +} + +const evaluateExpression = (actual: number, operator: string, expected: string) => { + switch (operator) { + case 'equals': + return actual === parseInt(expected) + case 'notEquals': + return actual !== parseInt(expected) + case 'greaterThan': + return actual > parseInt(expected) + case 'lessThan': + return actual < parseInt(expected) + case 'greaterThanOrEquals': + return actual >= 
parseInt(expected) + case 'lessThanOrEquals': + return actual <= parseInt(expected) + default: + return false + } +} diff --git a/packages/server/src/services/evaluations/LLMEvaluationRunner.ts b/packages/server/src/services/evaluations/LLMEvaluationRunner.ts new file mode 100644 index 000000000..351fdad60 --- /dev/null +++ b/packages/server/src/services/evaluations/LLMEvaluationRunner.ts @@ -0,0 +1,71 @@ +import { convertSchemaToZod, ICommonObject } from 'flowise-components' +import { z } from 'zod' +import { RunnableSequence } from '@langchain/core/runnables' +import { PromptTemplate } from '@langchain/core/prompts' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' +import { databaseEntities } from '../../utils' + +export class LLMEvaluationRunner { + private llm: any + + async runLLMEvaluators(data: ICommonObject, actualOutputArray: string[], errorArray: string[], llmEvaluatorMap: any[]) { + const evaluationResults: any[] = [] + if (this.llm === undefined) { + this.llm = await this.createLLM(data) + } + + for (let j = 0; j < actualOutputArray.length; j++) { + const actualOutput = actualOutputArray[j] + for (let i = 0; i < llmEvaluatorMap.length; i++) { + if (errorArray[j] !== '') { + evaluationResults.push({ + error: 'Not Graded!' 
+ }) + continue + } + try { + const llmEvaluator = llmEvaluatorMap[i] + let evaluator = llmEvaluator.evaluator + const schema = z.object(convertSchemaToZod(JSON.stringify(evaluator.outputSchema))) + const modelWithStructuredOutput = this.llm.withStructuredOutput(schema) + const llmExecutor = RunnableSequence.from([ + PromptTemplate.fromTemplate(evaluator.prompt as string), + modelWithStructuredOutput + ]) + const response = await llmExecutor.invoke({ + question: data.input, + actualOutput: actualOutput, + expectedOutput: data.expectedOutput + }) + evaluationResults.push(response) + } catch (error) { + evaluationResults.push({ + error: 'error' + }) + } + } + } + return evaluationResults + } + + async createLLM(data: ICommonObject): Promise { + try { + const appServer = getRunningExpressApp() + const nodeInstanceFilePath = appServer.nodesPool.componentNodes[data.llmConfig.llm].filePath as string + const nodeModule = await import(nodeInstanceFilePath) + const newNodeInstance = new nodeModule.nodeClass() + let nodeData = { + inputs: { modelName: data.llmConfig.model }, + credential: data.llmConfig.credentialId, + id: 'llm_0' + } + const options: ICommonObject = { + appDataSource: appServer.AppDataSource, + databaseEntities: databaseEntities + } + return await newNodeInstance.init(nodeData, undefined, options) + } catch (error) { + throw new Error('Error creating LLM') + } + } +} diff --git a/packages/server/src/services/evaluations/index.ts b/packages/server/src/services/evaluations/index.ts new file mode 100644 index 000000000..fe0aae71b --- /dev/null +++ b/packages/server/src/services/evaluations/index.ts @@ -0,0 +1,661 @@ +import { StatusCodes } from 'http-status-codes' +import { EvaluationRunner, ICommonObject } from 'flowise-components' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' +import { InternalFlowiseError } from '../../errors/internalFlowiseError' +import { getErrorMessage } from '../../errors/utils' +import { Dataset } from 
'../../database/entities/Dataset' +import { DatasetRow } from '../../database/entities/DatasetRow' +import { Evaluation } from '../../database/entities/Evaluation' +import { EvaluationStatus, IEvaluationResult } from '../../Interface' +import { EvaluationRun } from '../../database/entities/EvaluationRun' +import { Credential } from '../../database/entities/Credential' +import { ApiKey } from '../../database/entities/ApiKey' +import { ChatFlow } from '../../database/entities/ChatFlow' +import { getAppVersion } from '../../utils' +import { In } from 'typeorm' +import { getWorkspaceSearchOptions } from '../../enterprise/utils/ControllerServiceUtils' +import { v4 as uuidv4 } from 'uuid' +import { calculateCost, formatCost } from './CostCalculator' +import { runAdditionalEvaluators } from './EvaluatorRunner' +import evaluatorsService from '../evaluator' +import { LLMEvaluationRunner } from './LLMEvaluationRunner' +import { Assistant } from '../../database/entities/Assistant' + +const runAgain = async (id: string, baseURL: string, orgId: string, workspaceId: string) => { + try { + const appServer = getRunningExpressApp() + const evaluation = await appServer.AppDataSource.getRepository(Evaluation).findOneBy({ + id: id, + workspaceId: workspaceId + }) + if (!evaluation) throw new Error(`Evaluation ${id} not found`) + const additionalConfig = evaluation.additionalConfig ? JSON.parse(evaluation.additionalConfig) : {} + const data: ICommonObject = { + chatflowId: evaluation.chatflowId, + chatflowName: evaluation.chatflowName, + datasetName: evaluation.datasetName, + datasetId: evaluation.datasetId, + evaluationType: evaluation.evaluationType, + selectedSimpleEvaluators: JSON.stringify(additionalConfig.simpleEvaluators), + datasetAsOneConversation: additionalConfig.datasetAsOneConversation, + chatflowType: JSON.stringify(additionalConfig.chatflowTypes ? 
additionalConfig.chatflowTypes : []) + } + data.name = evaluation.name + data.workspaceId = evaluation.workspaceId + if (evaluation.evaluationType === 'llm') { + data.selectedLLMEvaluators = JSON.stringify(additionalConfig.lLMEvaluators) + data.credentialId = additionalConfig.credentialId + // this is to preserve backward compatibility for evaluations created before the llm/model options were added + if (!additionalConfig.credentialId && additionalConfig.llmConfig) { + data.model = additionalConfig.llmConfig.model + data.llm = additionalConfig.llmConfig.llm + data.credentialId = additionalConfig.llmConfig.credentialId + } else { + data.model = 'gpt-3.5-turbo' + data.llm = 'OpenAI' + } + } + data.version = true + return await createEvaluation(data, baseURL, orgId, workspaceId) + } catch (error) { + throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `Error: EvalsService.runAgain - ${getErrorMessage(error)}`) + } +} + +const createEvaluation = async (body: ICommonObject, baseURL: string, orgId: string, workspaceId: string) => { + try { + const appServer = getRunningExpressApp() + const newEval = new Evaluation() + Object.assign(newEval, body) + newEval.status = EvaluationStatus.PENDING + + const row = appServer.AppDataSource.getRepository(Evaluation).create(newEval) + row.average_metrics = JSON.stringify({}) + + const additionalConfig: ICommonObject = { + chatflowTypes: body.chatflowType ? JSON.parse(body.chatflowType) : [], + datasetAsOneConversation: body.datasetAsOneConversation, + simpleEvaluators: body.selectedSimpleEvaluators.length > 0 ? JSON.parse(body.selectedSimpleEvaluators) : [] + } + + if (body.evaluationType === 'llm') { + additionalConfig.lLMEvaluators = body.selectedLLMEvaluators.length > 0 ? 
JSON.parse(body.selectedLLMEvaluators) : [] + additionalConfig.llmConfig = { + credentialId: body.credentialId, + llm: body.llm, + model: body.model + } + } + row.additionalConfig = JSON.stringify(additionalConfig) + const newEvaluation = await appServer.AppDataSource.getRepository(Evaluation).save(row) + + await appServer.telemetry.sendTelemetry( + 'evaluation_created', + { + version: await getAppVersion() + }, + orgId + ) + + const dataset = await appServer.AppDataSource.getRepository(Dataset).findOneBy({ + id: body.datasetId, + workspaceId: workspaceId + }) + if (!dataset) throw new Error(`Dataset ${body.datasetId} not found`) + + const items = await appServer.AppDataSource.getRepository(DatasetRow).find({ + where: { datasetId: dataset.id }, + order: { sequenceNo: 'ASC' } + }) + ;(dataset as any).rows = items + + const data: ICommonObject = { + chatflowId: body.chatflowId, + dataset: dataset, + evaluationType: body.evaluationType, + evaluationId: newEvaluation.id, + credentialId: body.credentialId + } + if (body.datasetAsOneConversation) { + data.sessionId = uuidv4() + } + + // When chatflow has an APIKey + const apiKeys: { chatflowId: string; apiKey: string }[] = [] + const chatflowIds = JSON.parse(body.chatflowId) + for (let i = 0; i < chatflowIds.length; i++) { + const chatflowId = chatflowIds[i] + const cFlow = await appServer.AppDataSource.getRepository(ChatFlow).findOneBy({ + id: chatflowId, + workspaceId: workspaceId + }) + if (cFlow && cFlow.apikeyid) { + const apikeyObj = await appServer.AppDataSource.getRepository(ApiKey).findOneBy({ + id: cFlow.apikeyid + }) + if (apikeyObj) { + apiKeys.push({ + chatflowId: chatflowId, + apiKey: apikeyObj.apiKey + }) + } + } + } + if (apiKeys.length > 0) { + data.apiKeys = apiKeys + } + + // save the evaluation with status as pending + const evalRunner = new EvaluationRunner(baseURL) + if (body.evaluationType === 'llm') { + const credential = await appServer.AppDataSource.getRepository(Credential).findOneBy({ + id: 
body.credentialId + }) + + if (!credential) throw new Error(`Credential ${body.credentialId} not found`) + } + + let evalMetrics = { passCount: 0, failCount: 0, errorCount: 0 } + evalRunner + .runEvaluations(data) + .then(async (result) => { + let totalTime = 0 + // let us assume that the eval is successful + let allRowsSuccessful = true + try { + const llmEvaluationRunner = new LLMEvaluationRunner() + for (const resultRow of result.rows) { + const metricsArray: ICommonObject[] = [] + const actualOutputArray: string[] = [] + const errorArray: string[] = [] + for (const evaluationRow of resultRow.evaluations) { + if (evaluationRow.status === 'error') { + // if a row failed, mark the entire run as failed (error) + allRowsSuccessful = false + } + actualOutputArray.push(evaluationRow.actualOutput) + totalTime += parseFloat(evaluationRow.latency) + let metricsObjFromRun: ICommonObject = {} + + let nested_metrics = evaluationRow.nested_metrics + + let promptTokens = 0, + completionTokens = 0, + totalTokens = 0 + let inputCost = 0, + outputCost = 0, + totalCost = 0 + if (nested_metrics && nested_metrics.length > 0) { + for (let i = 0; i < nested_metrics.length; i++) { + const nested_metric = nested_metrics[i] + if (nested_metric.model && nested_metric.promptTokens > 0) { + promptTokens += nested_metric.promptTokens + completionTokens += nested_metric.completionTokens + totalTokens += nested_metric.totalTokens + + inputCost += nested_metric.cost_values.input_cost + outputCost += nested_metric.cost_values.output_cost + totalCost += nested_metric.cost_values.total_cost + + nested_metric['totalCost'] = formatCost(nested_metric.cost_values.total_cost) + nested_metric['promptCost'] = formatCost(nested_metric.cost_values.input_cost) + nested_metric['completionCost'] = formatCost(nested_metric.cost_values.output_cost) + } + } + nested_metrics = nested_metrics.filter((metric: any) => { + return metric.model && metric.provider + }) + } + const metrics = evaluationRow.metrics + if 
(metrics) { + if (nested_metrics && nested_metrics.length > 0) { + metrics.push({ + promptTokens: promptTokens, + completionTokens: completionTokens, + totalTokens: totalTokens, + totalCost: formatCost(totalCost), + promptCost: formatCost(inputCost), + completionCost: formatCost(outputCost) + }) + metricsObjFromRun.nested_metrics = nested_metrics + } + metrics.map((metric: any) => { + if (metric) { + const json = typeof metric === 'object' ? metric : JSON.parse(metric) + Object.getOwnPropertyNames(json).map((key) => { + metricsObjFromRun[key] = json[key] + }) + } + }) + metricsArray.push(metricsObjFromRun) + } + errorArray.push(evaluationRow.error) + } + + const newRun = new EvaluationRun() + newRun.evaluationId = newEvaluation.id + newRun.runDate = new Date() + newRun.input = resultRow.input + newRun.expectedOutput = resultRow.expectedOutput + newRun.actualOutput = JSON.stringify(actualOutputArray) + newRun.errors = JSON.stringify(errorArray) + calculateCost(metricsArray) + newRun.metrics = JSON.stringify(metricsArray) + + const { results, evaluatorMetrics } = await runAdditionalEvaluators( + metricsArray, + actualOutputArray, + errorArray, + body.selectedSimpleEvaluators.length > 0 ? JSON.parse(body.selectedSimpleEvaluators) : [], + workspaceId + ) + + newRun.evaluators = JSON.stringify(results) + evalMetrics.passCount += evaluatorMetrics.passCount + evalMetrics.failCount += evaluatorMetrics.failCount + evalMetrics.errorCount += evaluatorMetrics.errorCount + + if (body.evaluationType === 'llm') { + resultRow.llmConfig = additionalConfig.llmConfig + resultRow.LLMEvaluators = body.selectedLLMEvaluators.length > 0 ? 
JSON.parse(body.selectedLLMEvaluators) : [] + const llmEvaluatorMap: { evaluatorId: string; evaluator: any }[] = [] + for (let i = 0; i < resultRow.LLMEvaluators.length; i++) { + const evaluatorId = resultRow.LLMEvaluators[i] + const evaluator = await evaluatorsService.getEvaluator(evaluatorId, workspaceId) + llmEvaluatorMap.push({ + evaluatorId: evaluatorId, + evaluator: evaluator + }) + } + // iterate over the actualOutputArray and add the actualOutput to the evaluationLineItem object + const resultArray = await llmEvaluationRunner.runLLMEvaluators( + resultRow, + actualOutputArray, + errorArray, + llmEvaluatorMap + ) + newRun.llmEvaluators = JSON.stringify(resultArray) + const row = appServer.AppDataSource.getRepository(EvaluationRun).create(newRun) + await appServer.AppDataSource.getRepository(EvaluationRun).save(row) + } else { + const row = appServer.AppDataSource.getRepository(EvaluationRun).create(newRun) + await appServer.AppDataSource.getRepository(EvaluationRun).save(row) + } + } + //update the evaluation with status as completed + let passPercent = -1 + if (evalMetrics.passCount + evalMetrics.failCount + evalMetrics.errorCount > 0) { + passPercent = + (evalMetrics.passCount / (evalMetrics.passCount + evalMetrics.failCount + evalMetrics.errorCount)) * 100 + } + appServer.AppDataSource.getRepository(Evaluation) + .findOneBy({ id: newEvaluation.id }) + .then((evaluation) => { + if (evaluation) { + evaluation.status = allRowsSuccessful ? 
EvaluationStatus.COMPLETED : EvaluationStatus.ERROR + evaluation.average_metrics = JSON.stringify({ + averageLatency: (totalTime / result.rows.length).toFixed(3), + totalRuns: result.rows.length, + ...evalMetrics, + passPcnt: passPercent.toFixed(2) + }) + appServer.AppDataSource.getRepository(Evaluation).save(evaluation) + } + }) + } catch (error) { + //update the evaluation with status as error + appServer.AppDataSource.getRepository(Evaluation) + .findOneBy({ id: newEvaluation.id }) + .then((evaluation) => { + if (evaluation) { + evaluation.status = EvaluationStatus.ERROR + appServer.AppDataSource.getRepository(Evaluation).save(evaluation) + } + }) + } + }) + .catch((error) => { + // Handle errors from runEvaluations + console.error('Error running evaluations:', getErrorMessage(error)) + appServer.AppDataSource.getRepository(Evaluation) + .findOneBy({ id: newEvaluation.id }) + .then((evaluation) => { + if (evaluation) { + evaluation.status = EvaluationStatus.ERROR + evaluation.average_metrics = JSON.stringify({ + error: getErrorMessage(error) + }) + appServer.AppDataSource.getRepository(Evaluation).save(evaluation) + } + }) + .catch((dbError) => { + console.error('Error updating evaluation status:', getErrorMessage(dbError)) + }) + }) + + return getAllEvaluations(body.workspaceId) + } catch (error) { + throw new InternalFlowiseError( + StatusCodes.INTERNAL_SERVER_ERROR, + `Error: EvalsService.createEvaluation - ${getErrorMessage(error)}` + ) + } +} + +const getAllEvaluations = async (workspaceId: string, page: number = -1, limit: number = -1) => { + try { + const appServer = getRunningExpressApp() + + // First, get the count of distinct evaluation names for the total + // needed as the The getCount() method in TypeORM doesn't respect the GROUP BY clause and will return the total count of records + const countQuery = appServer.AppDataSource.getRepository(Evaluation) + .createQueryBuilder('ev') + .select('COUNT(DISTINCT(ev.name))', 'count') + .where('ev.workspaceId 
= :workspaceId', { workspaceId: workspaceId }) + + const totalResult = await countQuery.getRawOne() + const total = totalResult ? parseInt(totalResult.count) : 0 + + // Then get the distinct evaluation names with their counts and latest run date + const namesQueryBuilder = appServer.AppDataSource.getRepository(Evaluation) + .createQueryBuilder('ev') + .select('DISTINCT(ev.name)', 'name') + .addSelect('COUNT(ev.name)', 'count') + .addSelect('MAX(ev.runDate)', 'latestRunDate') + .andWhere('ev.workspaceId = :workspaceId', { workspaceId: workspaceId }) + .groupBy('ev.name') + .orderBy('max(ev.runDate)', 'DESC') // Order by the latest run date + + if (page > 0 && limit > 0) { + namesQueryBuilder.skip((page - 1) * limit) + namesQueryBuilder.take(limit) + } + + const evaluationNames = await namesQueryBuilder.getRawMany() + // Get all evaluations for all names at once in a single query + const returnResults: IEvaluationResult[] = [] + + if (evaluationNames.length > 0) { + const names = evaluationNames.map((item) => item.name) + // Fetch all evaluations for these names in a single query + const allEvaluations = await appServer.AppDataSource.getRepository(Evaluation) + .createQueryBuilder('ev') + .where('ev.name IN (:...names)', { names }) + .andWhere('ev.workspaceId = :workspaceId', { workspaceId }) + .orderBy('ev.name', 'ASC') + .addOrderBy('ev.runDate', 'DESC') + .getMany() + + // Process the results by name + const evaluationsByName = new Map() + // Group evaluations by name + for (const evaluation of allEvaluations) { + if (!evaluationsByName.has(evaluation.name)) { + evaluationsByName.set(evaluation.name, []) + } + evaluationsByName.get(evaluation.name)!.push(evaluation) + } + + // Process each name's evaluations + for (const item of evaluationNames) { + const evaluationsForName = evaluationsByName.get(item.name) || [] + for (let i = 0; i < evaluationsForName.length; i++) { + const evaluation = evaluationsForName[i] as IEvaluationResult + evaluation.latestEval = i === 
0 + evaluation.version = parseInt(item.count) - i + returnResults.push(evaluation) + } + } + } + + if (page > 0 && limit > 0) { + return { + total: total, + data: returnResults + } + } else { + return returnResults + } + } catch (error) { + throw new InternalFlowiseError( + StatusCodes.INTERNAL_SERVER_ERROR, + `Error: EvalsService.getAllEvaluations - ${getErrorMessage(error)}` + ) + } +} + +// Delete evaluation and all rows via id +const deleteEvaluation = async (id: string, activeWorkspaceId: string) => { + try { + const appServer = getRunningExpressApp() + await appServer.AppDataSource.getRepository(Evaluation).delete({ id: id }) + await appServer.AppDataSource.getRepository(EvaluationRun).delete({ evaluationId: id }) + const results = await appServer.AppDataSource.getRepository(Evaluation).findBy(getWorkspaceSearchOptions(activeWorkspaceId)) + return results + } catch (error) { + throw new InternalFlowiseError( + StatusCodes.INTERNAL_SERVER_ERROR, + `Error: EvalsService.deleteEvaluation - ${getErrorMessage(error)}` + ) + } +} + +// check for outdated evaluations +const isOutdated = async (id: string, workspaceId: string) => { + try { + const appServer = getRunningExpressApp() + const evaluation = await appServer.AppDataSource.getRepository(Evaluation).findOneBy({ + id: id, + workspaceId: workspaceId + }) + if (!evaluation) throw new Error(`Evaluation ${id} not found`) + const evaluationRunDate = evaluation.runDate.getTime() + let isOutdated = false + const returnObj: ICommonObject = { + isOutdated: false, + chatflows: [], + dataset: '', + errors: [] + } + + // check if the evaluation is outdated by extracting the runTime and then check with the dataset last updated time as well + // as the chatflows last updated time. 
If the evaluation is outdated, then return true else return false + const dataset = await appServer.AppDataSource.getRepository(Dataset).findOneBy({ + id: evaluation.datasetId, + workspaceId: workspaceId + }) + if (dataset) { + const datasetLastUpdated = dataset.updatedDate.getTime() + if (datasetLastUpdated > evaluationRunDate) { + isOutdated = true + returnObj.dataset = dataset + } + } else { + returnObj.errors.push({ + message: `Dataset ${evaluation.datasetName} not found`, + id: evaluation.datasetId + }) + isOutdated = true + } + const chatflowIds = evaluation.chatflowId ? JSON.parse(evaluation.chatflowId) : [] + const chatflowNames = evaluation.chatflowName ? JSON.parse(evaluation.chatflowName) : [] + const chatflowTypes = evaluation.additionalConfig ? JSON.parse(evaluation.additionalConfig).chatflowTypes : [] + for (let i = 0; i < chatflowIds.length; i++) { + // check for backward compatibility, as previous versions did not the types in additionalConfig + if (chatflowTypes && chatflowTypes.length >= 0) { + if (chatflowTypes[i] === 'Custom Assistant') { + // if the chatflow type is custom assistant, then we should NOT check in the chatflows table + continue + } + } + const chatflow = await appServer.AppDataSource.getRepository(ChatFlow).findOneBy({ + id: chatflowIds[i], + workspaceId: workspaceId + }) + if (!chatflow) { + returnObj.errors.push({ + message: `Chatflow ${chatflowNames[i]} not found`, + id: chatflowIds[i] + }) + isOutdated = true + } else { + const chatflowLastUpdated = chatflow.updatedDate.getTime() + if (chatflowLastUpdated > evaluationRunDate) { + isOutdated = true + returnObj.chatflows.push({ + chatflowName: chatflowNames[i], + chatflowId: chatflowIds[i], + chatflowType: chatflow.type === 'AGENTFLOW' ? 
'Agentflow v2' : 'Chatflow', + isOutdated: true + }) + } + } + } + if (chatflowTypes && chatflowTypes.length > 0) { + for (let i = 0; i < chatflowIds.length; i++) { + if (chatflowTypes[i] !== 'Custom Assistant') { + // if the chatflow type is NOT custom assistant, then bail out for this item + continue + } + const assistant = await appServer.AppDataSource.getRepository(Assistant).findOneBy({ + id: chatflowIds[i], + workspaceId: workspaceId + }) + if (!assistant) { + returnObj.errors.push({ + message: `Custom Assistant ${chatflowNames[i]} not found`, + id: chatflowIds[i] + }) + isOutdated = true + } else { + const chatflowLastUpdated = assistant.updatedDate.getTime() + if (chatflowLastUpdated > evaluationRunDate) { + isOutdated = true + returnObj.chatflows.push({ + chatflowName: chatflowNames[i], + chatflowId: chatflowIds[i], + chatflowType: 'Custom Assistant', + isOutdated: true + }) + } + } + } + } + returnObj.isOutdated = isOutdated + return returnObj + } catch (error) { + throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `Error: EvalsService.isOutdated - ${getErrorMessage(error)}`) + } +} + +const getEvaluation = async (id: string, workspaceId: string) => { + try { + const appServer = getRunningExpressApp() + const evaluation = await appServer.AppDataSource.getRepository(Evaluation).findOneBy({ + id: id, + workspaceId: workspaceId + }) + if (!evaluation) throw new Error(`Evaluation ${id} not found`) + const versionCount = await appServer.AppDataSource.getRepository(Evaluation).countBy({ + name: evaluation.name + }) + const items = await appServer.AppDataSource.getRepository(EvaluationRun).find({ + where: { evaluationId: id } + }) + const versions = (await getVersions(id, workspaceId)).versions + const versionNo = versions.findIndex((version) => version.id === id) + 1 + return { + ...evaluation, + versionCount: versionCount, + versionNo: versionNo, + rows: items + } + } catch (error) { + throw new 
InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `Error: EvalsService.getEvaluation - ${getErrorMessage(error)}`) + } +} + +const getVersions = async (id: string, workspaceId: string) => { + try { + const appServer = getRunningExpressApp() + const evaluation = await appServer.AppDataSource.getRepository(Evaluation).findOneBy({ + id: id, + workspaceId: workspaceId + }) + if (!evaluation) throw new Error(`Evaluation ${id} not found`) + const versions = await appServer.AppDataSource.getRepository(Evaluation).find({ + where: { + name: evaluation.name + }, + order: { + runDate: 'ASC' + } + }) + const returnResults: { id: string; runDate: Date; version: number }[] = [] + versions.map((version, index) => { + returnResults.push({ + id: version.id, + runDate: version.runDate, + version: index + 1 + }) + }) + return { + versions: returnResults + } + } catch (error) { + throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `Error: EvalsService.getEvaluation - ${getErrorMessage(error)}`) + } +} + +const patchDeleteEvaluations = async (ids: string[] = [], activeWorkspaceId: string, isDeleteAllVersion?: boolean) => { + try { + const appServer = getRunningExpressApp() + const evalsToBeDeleted = await appServer.AppDataSource.getRepository(Evaluation).find({ + where: { + id: In(ids), + workspaceId: activeWorkspaceId + } + }) + await appServer.AppDataSource.getRepository(Evaluation).delete(ids) + for (const evaluation of evalsToBeDeleted) { + await appServer.AppDataSource.getRepository(EvaluationRun).delete({ evaluationId: evaluation.id }) + } + + if (isDeleteAllVersion) { + for (const evaluation of evalsToBeDeleted) { + const otherVersionEvals = await appServer.AppDataSource.getRepository(Evaluation).find({ + where: { + name: evaluation.name + } + }) + if (otherVersionEvals.length > 0) { + await appServer.AppDataSource.getRepository(Evaluation).delete( + [...otherVersionEvals].map((evaluation) => evaluation.id) + ) + for (const otherVersionEval of 
otherVersionEvals) { + await appServer.AppDataSource.getRepository(EvaluationRun).delete({ evaluationId: otherVersionEval.id }) + } + } + } + } + + const results = await appServer.AppDataSource.getRepository(Evaluation).findBy(getWorkspaceSearchOptions(activeWorkspaceId)) + return results + } catch (error) { + throw new InternalFlowiseError( + StatusCodes.INTERNAL_SERVER_ERROR, + `Error: EvalsService.patchDeleteEvaluations - ${getErrorMessage(error)}` + ) + } +} + +export default { + createEvaluation, + getAllEvaluations, + deleteEvaluation, + getEvaluation, + isOutdated, + runAgain, + getVersions, + patchDeleteEvaluations +} diff --git a/packages/server/src/services/evaluator/index.ts b/packages/server/src/services/evaluator/index.ts new file mode 100644 index 000000000..bd3d7e23a --- /dev/null +++ b/packages/server/src/services/evaluator/index.ts @@ -0,0 +1,111 @@ +import { StatusCodes } from 'http-status-codes' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' +import { InternalFlowiseError } from '../../errors/internalFlowiseError' +import { getErrorMessage } from '../../errors/utils' +import { Evaluator } from '../../database/entities/Evaluator' +import { EvaluatorDTO } from '../../Interface.Evaluation' + +const getAllEvaluators = async (workspaceId: string, page: number = -1, limit: number = -1) => { + try { + const appServer = getRunningExpressApp() + const queryBuilder = appServer.AppDataSource.getRepository(Evaluator).createQueryBuilder('ev').orderBy('ev.updatedDate', 'DESC') + queryBuilder.andWhere('ev.workspaceId = :workspaceId', { workspaceId }) + if (page > 0 && limit > 0) { + queryBuilder.skip((page - 1) * limit) + queryBuilder.take(limit) + } + const [data, total] = await queryBuilder.getManyAndCount() + if (page > 0 && limit > 0) { + return { + total, + data: EvaluatorDTO.fromEntities(data) + } + } else { + return EvaluatorDTO.fromEntities(data) + } + } catch (error) { + throw new InternalFlowiseError( + 
StatusCodes.INTERNAL_SERVER_ERROR, + `Error: evaluatorService.getAllEvaluators - ${getErrorMessage(error)}` + ) + } +} + +const getEvaluator = async (id: string, workspaceId: string) => { + try { + const appServer = getRunningExpressApp() + const evaluator = await appServer.AppDataSource.getRepository(Evaluator).findOneBy({ + id: id, + workspaceId: workspaceId + }) + if (!evaluator) throw new Error(`Evaluator ${id} not found`) + return EvaluatorDTO.fromEntity(evaluator) + } catch (error) { + throw new InternalFlowiseError( + StatusCodes.INTERNAL_SERVER_ERROR, + `Error: evaluatorService.getEvaluator - ${getErrorMessage(error)}` + ) + } +} + +// Create new Evaluator +const createEvaluator = async (body: any) => { + try { + const appServer = getRunningExpressApp() + const newDs = EvaluatorDTO.toEntity(body) + + const evaluator = appServer.AppDataSource.getRepository(Evaluator).create(newDs) + const result = await appServer.AppDataSource.getRepository(Evaluator).save(evaluator) + return EvaluatorDTO.fromEntity(result) + } catch (error) { + throw new InternalFlowiseError( + StatusCodes.INTERNAL_SERVER_ERROR, + `Error: evaluatorService.createEvaluator - ${getErrorMessage(error)}` + ) + } +} + +// Update Evaluator +const updateEvaluator = async (id: string, body: any, workspaceId: string) => { + try { + const appServer = getRunningExpressApp() + const evaluator = await appServer.AppDataSource.getRepository(Evaluator).findOneBy({ + id: id, + workspaceId: workspaceId + }) + + if (!evaluator) throw new Error(`Evaluator ${id} not found`) + + const updateEvaluator = EvaluatorDTO.toEntity(body) + updateEvaluator.id = id + appServer.AppDataSource.getRepository(Evaluator).merge(evaluator, updateEvaluator) + const result = await appServer.AppDataSource.getRepository(Evaluator).save(evaluator) + return EvaluatorDTO.fromEntity(result) + } catch (error) { + throw new InternalFlowiseError( + StatusCodes.INTERNAL_SERVER_ERROR, + `Error: evaluatorService.updateEvaluator - 
${getErrorMessage(error)}` + ) + } +} + +// Delete Evaluator via id +const deleteEvaluator = async (id: string, workspaceId: string) => { + try { + const appServer = getRunningExpressApp() + return await appServer.AppDataSource.getRepository(Evaluator).delete({ id: id, workspaceId: workspaceId }) + } catch (error) { + throw new InternalFlowiseError( + StatusCodes.INTERNAL_SERVER_ERROR, + `Error: evaluatorService.deleteEvaluator - ${getErrorMessage(error)}` + ) + } +} + +export default { + getAllEvaluators, + getEvaluator, + createEvaluator, + updateEvaluator, + deleteEvaluator +} diff --git a/packages/server/src/services/executions/index.ts b/packages/server/src/services/executions/index.ts index 899d6a092..062337aad 100644 --- a/packages/server/src/services/executions/index.ts +++ b/packages/server/src/services/executions/index.ts @@ -1,29 +1,36 @@ import { StatusCodes } from 'http-status-codes' -import { InternalFlowiseError } from '../../errors/internalFlowiseError' -import { getErrorMessage } from '../../errors/utils' -import { getRunningExpressApp } from '../../utils/getRunningExpressApp' -import { Execution } from '../../database/entities/Execution' -import { ExecutionState, IAgentflowExecutedData } from '../../Interface' import { In } from 'typeorm' import { ChatMessage } from '../../database/entities/ChatMessage' -import { _removeCredentialId } from '../../utils/buildAgentflow' +import { Execution } from '../../database/entities/Execution' +import { InternalFlowiseError } from '../../errors/internalFlowiseError' +import { getErrorMessage } from '../../errors/utils' +import { ExecutionState, IAgentflowExecutedData } from '../../Interface' +import { _removeCredentialId } from '../../utils' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' -interface ExecutionFilters { +export interface ExecutionFilters { id?: string agentflowId?: string + agentflowName?: string sessionId?: string state?: ExecutionState startDate?: Date endDate?: Date 
page?: number limit?: number + workspaceId?: string } -const getExecutionById = async (executionId: string): Promise => { +const getExecutionById = async (executionId: string, workspaceId?: string): Promise => { try { const appServer = getRunningExpressApp() const executionRepository = appServer.AppDataSource.getRepository(Execution) - const res = await executionRepository.findOne({ where: { id: executionId } }) + + const query: any = { id: executionId } + // Add workspace filtering if provided + if (workspaceId) query.workspaceId = workspaceId + + const res = await executionRepository.findOne({ where: query }) if (!res) { throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Execution ${executionId} not found`) } @@ -59,21 +66,24 @@ const getPublicExecutionById = async (executionId: string): Promise => { try { const appServer = getRunningExpressApp() - const { id, agentflowId, sessionId, state, startDate, endDate, page = 1, limit = 10 } = filters + const { id, agentflowId, agentflowName, sessionId, state, startDate, endDate, page = 1, limit = 12, workspaceId } = filters // Handle UUID fields properly using raw parameters to avoid type conversion issues // This uses the query builder instead of direct objects for compatibility with UUID fields const queryBuilder = appServer.AppDataSource.getRepository(Execution) .createQueryBuilder('execution') .leftJoinAndSelect('execution.agentflow', 'agentflow') - .orderBy('execution.createdDate', 'DESC') + .orderBy('execution.updatedDate', 'DESC') .skip((page - 1) * limit) .take(limit) if (id) queryBuilder.andWhere('execution.id = :id', { id }) if (agentflowId) queryBuilder.andWhere('execution.agentflowId = :agentflowId', { agentflowId }) + if (agentflowName) + queryBuilder.andWhere('LOWER(agentflow.name) LIKE LOWER(:agentflowName)', { agentflowName: `%${agentflowName}%` }) if (sessionId) queryBuilder.andWhere('execution.sessionId = :sessionId', { sessionId }) if (state) queryBuilder.andWhere('execution.state = :state', { 
state }) + if (workspaceId) queryBuilder.andWhere('execution.workspaceId = :workspaceId', { workspaceId }) // Date range conditions if (startDate && endDate) { @@ -95,12 +105,15 @@ const getAllExecutions = async (filters: ExecutionFilters = {}): Promise<{ data: } } -const updateExecution = async (executionId: string, data: Partial): Promise => { +const updateExecution = async (executionId: string, data: Partial, workspaceId?: string): Promise => { try { const appServer = getRunningExpressApp() - const execution = await appServer.AppDataSource.getRepository(Execution).findOneBy({ - id: executionId - }) + + const query: any = { id: executionId } + // Add workspace filtering if provided + if (workspaceId) query.workspaceId = workspaceId + + const execution = await appServer.AppDataSource.getRepository(Execution).findOneBy(query) if (!execution) { throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Execution ${executionId} not found`) } @@ -120,17 +133,20 @@ const updateExecution = async (executionId: string, data: Partial): P /** * Delete multiple executions by their IDs * @param executionIds Array of execution IDs to delete + * @param workspaceId Optional workspace ID to filter executions * @returns Object with success status and count of deleted executions */ -const deleteExecutions = async (executionIds: string[]): Promise<{ success: boolean; deletedCount: number }> => { +const deleteExecutions = async (executionIds: string[], workspaceId?: string): Promise<{ success: boolean; deletedCount: number }> => { try { const appServer = getRunningExpressApp() const executionRepository = appServer.AppDataSource.getRepository(Execution) - // Delete executions where id is in the provided array - const result = await executionRepository.delete({ - id: In(executionIds) - }) + // Create the where condition with workspace filtering if provided + const whereCondition: any = { id: In(executionIds) } + if (workspaceId) whereCondition.workspaceId = workspaceId + + // Delete 
executions where id is in the provided array and belongs to the workspace + const result = await executionRepository.delete(whereCondition) // Update chat message executionId column to NULL await appServer.AppDataSource.getRepository(ChatMessage).update({ executionId: In(executionIds) }, { executionId: null as any }) diff --git a/packages/server/src/services/export-import/index.ts b/packages/server/src/services/export-import/index.ts index 3eebc1766..7e2d8f461 100644 --- a/packages/server/src/services/export-import/index.ts +++ b/packages/server/src/services/export-import/index.ts @@ -1,5 +1,5 @@ import { StatusCodes } from 'http-status-codes' -import { In, QueryRunner } from 'typeorm' +import { EntityManager, In, QueryRunner } from 'typeorm' import { v4 as uuidv4 } from 'uuid' import { Assistant } from '../../database/entities/Assistant' import { ChatFlow } from '../../database/entities/ChatFlow' @@ -13,15 +13,20 @@ import { Tool } from '../../database/entities/Tool' import { Variable } from '../../database/entities/Variable' import { InternalFlowiseError } from '../../errors/internalFlowiseError' import { getErrorMessage } from '../../errors/utils' +import assistantsService from '../../services/assistants' +import chatflowsService from '../../services/chatflows' import { getRunningExpressApp } from '../../utils/getRunningExpressApp' +import { checkUsageLimit } from '../../utils/quotaUsage' import assistantService from '../assistants' import chatMessagesService from '../chat-messages' import chatflowService from '../chatflows' import documenStoreService from '../documentstore' -import executionService from '../executions' +import executionService, { ExecutionFilters } from '../executions' import marketplacesService from '../marketplaces' import toolsService from '../tools' import variableService from '../variables' +import { Platform } from '../../Interface' +import { sanitizeNullBytes } from '../../utils/sanitize.util' type ExportInput = { agentflow: boolean @@ 
-85,39 +90,71 @@ const convertExportInput = (body: any): ExportInput => { } const FileDefaultName = 'ExportData.json' -const exportData = async (exportInput: ExportInput): Promise<{ FileDefaultName: string } & ExportData> => { +const exportData = async (exportInput: ExportInput, activeWorkspaceId: string): Promise<{ FileDefaultName: string } & ExportData> => { try { - let AgentFlow: ChatFlow[] = exportInput.agentflow === true ? await chatflowService.getAllChatflows('MULTIAGENT') : [] - let AgentFlowV2: ChatFlow[] = exportInput.agentflowv2 === true ? await chatflowService.getAllChatflows('AGENTFLOW') : [] + let AgentFlow: ChatFlow[] | { data: ChatFlow[]; total: number } = + exportInput.agentflow === true ? await chatflowService.getAllChatflows('MULTIAGENT', activeWorkspaceId) : [] + AgentFlow = 'data' in AgentFlow ? AgentFlow.data : AgentFlow - let AssistantCustom: Assistant[] = exportInput.assistantCustom === true ? await assistantService.getAllAssistants('CUSTOM') : [] - let AssistantFlow: ChatFlow[] = exportInput.assistantCustom === true ? await chatflowService.getAllChatflows('ASSISTANT') : [] + let AgentFlowV2: ChatFlow[] | { data: ChatFlow[]; total: number } = + exportInput.agentflowv2 === true ? await chatflowService.getAllChatflows('AGENTFLOW', activeWorkspaceId) : [] + AgentFlowV2 = 'data' in AgentFlowV2 ? AgentFlowV2.data : AgentFlowV2 - let AssistantOpenAI: Assistant[] = exportInput.assistantOpenAI === true ? await assistantService.getAllAssistants('OPENAI') : [] + let AssistantCustom: Assistant[] = + exportInput.assistantCustom === true ? await assistantService.getAllAssistants(activeWorkspaceId, 'CUSTOM') : [] - let AssistantAzure: Assistant[] = exportInput.assistantAzure === true ? await assistantService.getAllAssistants('AZURE') : [] + let AssistantFlow: ChatFlow[] | { data: ChatFlow[]; total: number } = + exportInput.assistantCustom === true ? 
await chatflowService.getAllChatflows('ASSISTANT', activeWorkspaceId) : [] + AssistantFlow = 'data' in AssistantFlow ? AssistantFlow.data : AssistantFlow - let ChatFlow: ChatFlow[] = exportInput.chatflow === true ? await chatflowService.getAllChatflows('CHATFLOW') : [] + let AssistantOpenAI: Assistant[] = + exportInput.assistantOpenAI === true ? await assistantService.getAllAssistants(activeWorkspaceId, 'OPENAI') : [] - let ChatMessage: ChatMessage[] = exportInput.chat_message === true ? await chatMessagesService.getAllMessages() : [] + let AssistantAzure: Assistant[] = + exportInput.assistantAzure === true ? await assistantService.getAllAssistants(activeWorkspaceId, 'AZURE') : [] + + let ChatFlow: ChatFlow[] | { data: ChatFlow[]; total: number } = + exportInput.chatflow === true ? await chatflowService.getAllChatflows('CHATFLOW', activeWorkspaceId) : [] + ChatFlow = 'data' in ChatFlow ? ChatFlow.data : ChatFlow + + let allChatflow: ChatFlow[] | { data: ChatFlow[]; total: number } = + exportInput.chat_message === true || exportInput.chat_feedback === true + ? await chatflowService.getAllChatflows(undefined, activeWorkspaceId) + : [] + allChatflow = 'data' in allChatflow ? allChatflow.data : allChatflow + const chatflowIds = allChatflow.map((chatflow) => chatflow.id) + + let ChatMessage: ChatMessage[] = + exportInput.chat_message === true ? await chatMessagesService.getMessagesByChatflowIds(chatflowIds) : [] let ChatMessageFeedback: ChatMessageFeedback[] = - exportInput.chat_feedback === true ? await chatMessagesService.getAllMessagesFeedback() : [] + exportInput.chat_feedback === true ? await chatMessagesService.getMessagesFeedbackByChatflowIds(chatflowIds) : [] - let CustomTemplate: CustomTemplate[] = exportInput.custom_template === true ? 
await marketplacesService.getAllCustomTemplates() : [] - CustomTemplate = CustomTemplate.map((customTemplate) => ({ ...customTemplate, usecases: JSON.stringify(customTemplate.usecases) })) + let CustomTemplate: CustomTemplate[] = + exportInput.custom_template === true ? await marketplacesService.getAllCustomTemplates(activeWorkspaceId) : [] - let DocumentStore: DocumentStore[] = exportInput.document_store === true ? await documenStoreService.getAllDocumentStores() : [] + let DocumentStore: DocumentStore[] | { data: DocumentStore[]; total: number } = + exportInput.document_store === true ? await documenStoreService.getAllDocumentStores(activeWorkspaceId) : [] + DocumentStore = 'data' in DocumentStore ? DocumentStore.data : DocumentStore + + const documentStoreIds = DocumentStore.map((documentStore) => documentStore.id) let DocumentStoreFileChunk: DocumentStoreFileChunk[] = - exportInput.document_store === true ? await documenStoreService.getAllDocumentFileChunks() : [] + exportInput.document_store === true + ? await documenStoreService.getAllDocumentFileChunksByDocumentStoreIds(documentStoreIds) + : [] - const { data: totalExecutions } = exportInput.execution === true ? await executionService.getAllExecutions() : { data: [] } + const filters: ExecutionFilters = { workspaceId: activeWorkspaceId } + const { data: totalExecutions } = exportInput.execution === true ? await executionService.getAllExecutions(filters) : { data: [] } let Execution: Execution[] = exportInput.execution === true ? totalExecutions : [] - let Tool: Tool[] = exportInput.tool === true ? await toolsService.getAllTools() : [] + let Tool: Tool[] | { data: Tool[]; total: number } = + exportInput.tool === true ? await toolsService.getAllTools(activeWorkspaceId) : [] + Tool = 'data' in Tool ? Tool.data : Tool - let Variable: Variable[] = exportInput.variable === true ? 
await variableService.getAllVariables() : [] + let Variable: Variable[] | { data: Variable[]; total: number } = + exportInput.variable === true ? await variableService.getAllVariables(activeWorkspaceId) : [] + Variable = 'data' in Variable ? Variable.data : Variable return { FileDefaultName, @@ -187,7 +224,12 @@ async function replaceDuplicateIdsForAssistant(queryRunner: QueryRunner, origina } } -async function replaceDuplicateIdsForChatMessage(queryRunner: QueryRunner, originalData: ExportData, chatMessages: ChatMessage[]) { +async function replaceDuplicateIdsForChatMessage( + queryRunner: QueryRunner, + originalData: ExportData, + chatMessages: ChatMessage[], + activeWorkspaceId?: string +) { try { const chatmessageChatflowIds = chatMessages.map((chatMessage) => { return { id: chatMessage.chatflowid, qty: 0 } @@ -205,7 +247,10 @@ async function replaceDuplicateIdsForChatMessage(queryRunner: QueryRunner, origi }) const databaseChatflowIds = await ( await queryRunner.manager.find(ChatFlow, { - where: { id: In(chatmessageChatflowIds.map((chatmessageChatflowId) => chatmessageChatflowId.id)) } + where: { + id: In(chatmessageChatflowIds.map((chatmessageChatflowId) => chatmessageChatflowId.id)), + workspaceId: activeWorkspaceId + } }) ).map((chatflow) => chatflow.id) chatmessageChatflowIds.forEach((item) => { @@ -225,11 +270,25 @@ async function replaceDuplicateIdsForChatMessage(queryRunner: QueryRunner, origi where: { id: In(ids) } }) if (records.length < 0) return originalData - for (let record of records) { - const oldId = record.id - const newId = uuidv4() - originalData = JSON.parse(JSON.stringify(originalData).replaceAll(oldId, newId)) - } + + // Replace duplicate ChatMessage ids found in db with new ids, + // and update corresponding messageId references in ChatMessageFeedback + const idMap: { [key: string]: string } = {} + const dbExistingIds = new Set(records.map((record) => record.id)) + originalData.ChatMessage = originalData.ChatMessage.map((item) => { + if 
(dbExistingIds.has(item.id)) { + const newId = uuidv4() + idMap[item.id] = newId + return { ...item, id: newId } + } + return item + }) + originalData.ChatMessageFeedback = originalData.ChatMessageFeedback.map((item) => { + if (idMap[item.messageId]) { + return { ...item, messageId: idMap[item.messageId] } + } + return item + }) return originalData } catch (error) { throw new InternalFlowiseError( @@ -239,7 +298,12 @@ async function replaceDuplicateIdsForChatMessage(queryRunner: QueryRunner, origi } } -async function replaceExecutionIdForChatMessage(queryRunner: QueryRunner, originalData: ExportData, chatMessages: ChatMessage[]) { +async function replaceExecutionIdForChatMessage( + queryRunner: QueryRunner, + originalData: ExportData, + chatMessages: ChatMessage[], + activeWorkspaceId?: string +) { try { // step 1 - get all execution ids from chatMessages const chatMessageExecutionIds = chatMessages @@ -259,7 +323,10 @@ async function replaceExecutionIdForChatMessage(queryRunner: QueryRunner, origin // step 3 - increase qty if execution id is in database const databaseExecutionIds = await ( await queryRunner.manager.find(Execution, { - where: { id: In(chatMessageExecutionIds.map((chatMessageExecutionId) => chatMessageExecutionId.id)) } + where: { + id: In(chatMessageExecutionIds.map((chatMessageExecutionId) => chatMessageExecutionId.id)), + workspaceId: activeWorkspaceId + } }) ).map((execution) => execution.id) chatMessageExecutionIds.forEach((item) => { @@ -290,7 +357,8 @@ async function replaceExecutionIdForChatMessage(queryRunner: QueryRunner, origin async function replaceDuplicateIdsForChatMessageFeedback( queryRunner: QueryRunner, originalData: ExportData, - chatMessageFeedbacks: ChatMessageFeedback[] + chatMessageFeedbacks: ChatMessageFeedback[], + activeWorkspaceId?: string ) { try { const feedbackChatflowIds = chatMessageFeedbacks.map((feedback) => { @@ -309,7 +377,7 @@ async function replaceDuplicateIdsForChatMessageFeedback( }) const databaseChatflowIds 
= await ( await queryRunner.manager.find(ChatFlow, { - where: { id: In(feedbackChatflowIds.map((feedbackChatflowId) => feedbackChatflowId.id)) } + where: { id: In(feedbackChatflowIds.map((feedbackChatflowId) => feedbackChatflowId.id)), workspaceId: activeWorkspaceId } }) ).map((chatflow) => chatflow.id) feedbackChatflowIds.forEach((item) => { @@ -352,12 +420,28 @@ async function replaceDuplicateIdsForChatMessageFeedback( const records = await queryRunner.manager.find(ChatMessageFeedback, { where: { id: In(ids) } }) + + // remove duplicate messageId + const seenMessageIds = new Set() + originalData.ChatMessageFeedback = originalData.ChatMessageFeedback.filter((feedback) => { + if (seenMessageIds.has(feedback.messageId)) { + return false + } + seenMessageIds.add(feedback.messageId) + return true + }) + if (records.length < 0) return originalData - for (let record of records) { - const oldId = record.id - const newId = uuidv4() - originalData = JSON.parse(JSON.stringify(originalData).replaceAll(oldId, newId)) - } + + // replace duplicate ids found in db to new id + const dbExistingIds = new Set(records.map((record) => record.id)) + originalData.ChatMessageFeedback = originalData.ChatMessageFeedback.map((item) => { + if (dbExistingIds.has(item.id)) { + const newId = uuidv4() + return { ...item, id: newId } + } + return item + }) return originalData } catch (error) { throw new InternalFlowiseError( @@ -420,11 +504,15 @@ async function replaceDuplicateIdsForDocumentStoreFileChunk( where: { id: In(ids) } }) if (records.length < 0) return originalData - for (let record of records) { - const oldId = record.id - const newId = uuidv4() - originalData = JSON.parse(JSON.stringify(originalData).replaceAll(oldId, newId)) - } + + // replace duplicate ids found in db to new id + const dbExistingIds = new Set(records.map((record) => record.id)) + originalData.DocumentStoreFileChunk = originalData.DocumentStoreFileChunk.map((item) => { + if (dbExistingIds.has(item.id)) { + return { 
...item, id: uuidv4() } + } + return item + }) return originalData } catch (error) { throw new InternalFlowiseError( @@ -461,6 +549,8 @@ async function replaceDuplicateIdsForVariable(queryRunner: QueryRunner, original const records = await queryRunner.manager.find(Variable, { where: { id: In(ids) } }) + if (getRunningExpressApp().identityManager.getPlatformType() === Platform.CLOUD) + originalData.Variable = originalData.Variable.filter((variable) => variable.type !== 'runtime') if (records.length < 0) return originalData for (let record of records) { const oldId = record.id @@ -503,7 +593,22 @@ function reduceSpaceForChatflowFlowData(chatflows: ChatFlow[]) { }) } -const importData = async (importData: ExportData) => { +function insertWorkspaceId(importedData: any, activeWorkspaceId?: string) { + if (!activeWorkspaceId) return importedData + importedData.forEach((item: any) => { + item.workspaceId = activeWorkspaceId + }) + return importedData +} + +async function saveBatch(manager: EntityManager, entity: any, items: any[], batchSize = 900) { + for (let i = 0; i < items.length; i += batchSize) { + const batch = items.slice(i, i + batchSize) + await manager.save(entity, batch) + } +} + +const importData = async (importData: ExportData, orgId: string, activeWorkspaceId: string, subscriptionId: string) => { // Initialize missing properties with empty arrays to avoid "undefined" errors importData.AgentFlow = importData.AgentFlow || [] importData.AgentFlowV2 = importData.AgentFlowV2 || [] @@ -529,43 +634,127 @@ const importData = async (importData: ExportData) => { try { if (importData.AgentFlow.length > 0) { importData.AgentFlow = reduceSpaceForChatflowFlowData(importData.AgentFlow) + importData.AgentFlow = insertWorkspaceId(importData.AgentFlow, activeWorkspaceId) + const existingChatflowCount = await chatflowsService.getAllChatflowsCountByOrganization('MULTIAGENT', orgId) + const newChatflowCount = importData.AgentFlow.length + await checkUsageLimit( + 'flows', + 
subscriptionId, + getRunningExpressApp().usageCacheManager, + existingChatflowCount + newChatflowCount + ) importData = await replaceDuplicateIdsForChatFlow(queryRunner, importData, importData.AgentFlow) } if (importData.AgentFlowV2.length > 0) { importData.AgentFlowV2 = reduceSpaceForChatflowFlowData(importData.AgentFlowV2) + importData.AgentFlowV2 = insertWorkspaceId(importData.AgentFlowV2, activeWorkspaceId) + const existingChatflowCount = await chatflowsService.getAllChatflowsCountByOrganization('AGENTFLOW', orgId) + const newChatflowCount = importData.AgentFlowV2.length + await checkUsageLimit( + 'flows', + subscriptionId, + getRunningExpressApp().usageCacheManager, + existingChatflowCount + newChatflowCount + ) importData = await replaceDuplicateIdsForChatFlow(queryRunner, importData, importData.AgentFlowV2) } - if (importData.AssistantCustom.length > 0) + if (importData.AssistantCustom.length > 0) { + importData.AssistantCustom = insertWorkspaceId(importData.AssistantCustom, activeWorkspaceId) + const existingAssistantCount = await assistantsService.getAssistantsCountByOrganization('CUSTOM', orgId) + const newAssistantCount = importData.AssistantCustom.length + await checkUsageLimit( + 'flows', + subscriptionId, + getRunningExpressApp().usageCacheManager, + existingAssistantCount + newAssistantCount + ) importData = await replaceDuplicateIdsForAssistant(queryRunner, importData, importData.AssistantCustom) + } if (importData.AssistantFlow.length > 0) { importData.AssistantFlow = reduceSpaceForChatflowFlowData(importData.AssistantFlow) + importData.AssistantFlow = insertWorkspaceId(importData.AssistantFlow, activeWorkspaceId) + const existingChatflowCount = await chatflowsService.getAllChatflowsCountByOrganization('ASSISTANT', orgId) + const newChatflowCount = importData.AssistantFlow.length + await checkUsageLimit( + 'flows', + subscriptionId, + getRunningExpressApp().usageCacheManager, + existingChatflowCount + newChatflowCount + ) importData = await 
replaceDuplicateIdsForChatFlow(queryRunner, importData, importData.AssistantFlow) } - if (importData.AssistantOpenAI.length > 0) + if (importData.AssistantOpenAI.length > 0) { + importData.AssistantOpenAI = insertWorkspaceId(importData.AssistantOpenAI, activeWorkspaceId) + const existingAssistantCount = await assistantsService.getAssistantsCountByOrganization('OPENAI', orgId) + const newAssistantCount = importData.AssistantOpenAI.length + await checkUsageLimit( + 'flows', + subscriptionId, + getRunningExpressApp().usageCacheManager, + existingAssistantCount + newAssistantCount + ) importData = await replaceDuplicateIdsForAssistant(queryRunner, importData, importData.AssistantOpenAI) - if (importData.AssistantAzure.length > 0) + } + if (importData.AssistantAzure.length > 0) { + importData.AssistantAzure = insertWorkspaceId(importData.AssistantAzure, activeWorkspaceId) + const existingAssistantCount = await assistantsService.getAssistantsCountByOrganization('AZURE', orgId) + const newAssistantCount = importData.AssistantAzure.length + await checkUsageLimit( + 'flows', + subscriptionId, + getRunningExpressApp().usageCacheManager, + existingAssistantCount + newAssistantCount + ) importData = await replaceDuplicateIdsForAssistant(queryRunner, importData, importData.AssistantAzure) + } if (importData.ChatFlow.length > 0) { importData.ChatFlow = reduceSpaceForChatflowFlowData(importData.ChatFlow) + importData.ChatFlow = insertWorkspaceId(importData.ChatFlow, activeWorkspaceId) + const existingChatflowCount = await chatflowsService.getAllChatflowsCountByOrganization('CHATFLOW', orgId) + const newChatflowCount = importData.ChatFlow.length + await checkUsageLimit( + 'flows', + subscriptionId, + getRunningExpressApp().usageCacheManager, + existingChatflowCount + newChatflowCount + ) importData = await replaceDuplicateIdsForChatFlow(queryRunner, importData, importData.ChatFlow) } if (importData.ChatMessage.length > 0) { - importData = await 
replaceDuplicateIdsForChatMessage(queryRunner, importData, importData.ChatMessage) - importData = await replaceExecutionIdForChatMessage(queryRunner, importData, importData.ChatMessage) + importData = await replaceDuplicateIdsForChatMessage(queryRunner, importData, importData.ChatMessage, activeWorkspaceId) + importData = await replaceExecutionIdForChatMessage(queryRunner, importData, importData.ChatMessage, activeWorkspaceId) } if (importData.ChatMessageFeedback.length > 0) - importData = await replaceDuplicateIdsForChatMessageFeedback(queryRunner, importData, importData.ChatMessageFeedback) - if (importData.CustomTemplate.length > 0) + importData = await replaceDuplicateIdsForChatMessageFeedback( + queryRunner, + importData, + importData.ChatMessageFeedback, + activeWorkspaceId + ) + if (importData.CustomTemplate.length > 0) { + importData.CustomTemplate = insertWorkspaceId(importData.CustomTemplate, activeWorkspaceId) importData = await replaceDuplicateIdsForCustomTemplate(queryRunner, importData, importData.CustomTemplate) - if (importData.DocumentStore.length > 0) + } + if (importData.DocumentStore.length > 0) { + importData.DocumentStore = insertWorkspaceId(importData.DocumentStore, activeWorkspaceId) importData = await replaceDuplicateIdsForDocumentStore(queryRunner, importData, importData.DocumentStore) + } if (importData.DocumentStoreFileChunk.length > 0) importData = await replaceDuplicateIdsForDocumentStoreFileChunk(queryRunner, importData, importData.DocumentStoreFileChunk) - if (importData.Tool.length > 0) importData = await replaceDuplicateIdsForTool(queryRunner, importData, importData.Tool) - if (importData.Execution.length > 0) + if (importData.Tool.length > 0) { + importData.Tool = insertWorkspaceId(importData.Tool, activeWorkspaceId) + importData = await replaceDuplicateIdsForTool(queryRunner, importData, importData.Tool) + } + if (importData.Execution.length > 0) { + importData.Execution = insertWorkspaceId(importData.Execution, 
activeWorkspaceId) importData = await replaceDuplicateIdsForExecution(queryRunner, importData, importData.Execution) - if (importData.Variable.length > 0) + } + if (importData.Variable.length > 0) { + importData.Variable = insertWorkspaceId(importData.Variable, activeWorkspaceId) importData = await replaceDuplicateIdsForVariable(queryRunner, importData, importData.Variable) + } + + importData = sanitizeNullBytes(importData) await queryRunner.startTransaction() @@ -576,23 +765,23 @@ const importData = async (importData: ExportData) => { if (importData.AssistantOpenAI.length > 0) await queryRunner.manager.save(Assistant, importData.AssistantOpenAI) if (importData.AssistantAzure.length > 0) await queryRunner.manager.save(Assistant, importData.AssistantAzure) if (importData.ChatFlow.length > 0) await queryRunner.manager.save(ChatFlow, importData.ChatFlow) - if (importData.ChatMessage.length > 0) await queryRunner.manager.save(ChatMessage, importData.ChatMessage) + if (importData.ChatMessage.length > 0) await saveBatch(queryRunner.manager, ChatMessage, importData.ChatMessage) if (importData.ChatMessageFeedback.length > 0) await queryRunner.manager.save(ChatMessageFeedback, importData.ChatMessageFeedback) if (importData.CustomTemplate.length > 0) await queryRunner.manager.save(CustomTemplate, importData.CustomTemplate) if (importData.DocumentStore.length > 0) await queryRunner.manager.save(DocumentStore, importData.DocumentStore) if (importData.DocumentStoreFileChunk.length > 0) - await queryRunner.manager.save(DocumentStoreFileChunk, importData.DocumentStoreFileChunk) + await saveBatch(queryRunner.manager, DocumentStoreFileChunk, importData.DocumentStoreFileChunk) if (importData.Tool.length > 0) await queryRunner.manager.save(Tool, importData.Tool) if (importData.Execution.length > 0) await queryRunner.manager.save(Execution, importData.Execution) if (importData.Variable.length > 0) await queryRunner.manager.save(Variable, importData.Variable) await 
queryRunner.commitTransaction() } catch (error) { - if (queryRunner && !queryRunner.isTransactionActive) await queryRunner.rollbackTransaction() + if (queryRunner.isTransactionActive) await queryRunner.rollbackTransaction() throw error } finally { - if (queryRunner && !queryRunner.isReleased) await queryRunner.release() + if (!queryRunner.isReleased) await queryRunner.release() } } catch (error) { throw new InternalFlowiseError( diff --git a/packages/server/src/services/fetch-links/index.ts b/packages/server/src/services/fetch-links/index.ts index 53c6e94e9..cf5551512 100644 --- a/packages/server/src/services/fetch-links/index.ts +++ b/packages/server/src/services/fetch-links/index.ts @@ -1,4 +1,4 @@ -import { webCrawl, xmlScrape } from 'flowise-components' +import { webCrawl, xmlScrape, checkDenyList } from 'flowise-components' import { StatusCodes } from 'http-status-codes' import { InternalFlowiseError } from '../../errors/internalFlowiseError' import { getErrorMessage } from '../../errors/utils' @@ -6,6 +6,8 @@ import { getErrorMessage } from '../../errors/utils' const getAllLinks = async (requestUrl: string, relativeLinksMethod: string, queryLimit: string): Promise => { try { const url = decodeURIComponent(requestUrl) + await checkDenyList(url) + if (!relativeLinksMethod) { throw new InternalFlowiseError( StatusCodes.INTERNAL_SERVER_ERROR, diff --git a/packages/server/src/services/flow-configs/index.ts b/packages/server/src/services/flow-configs/index.ts index 8ce05499f..7755e86f3 100644 --- a/packages/server/src/services/flow-configs/index.ts +++ b/packages/server/src/services/flow-configs/index.ts @@ -6,10 +6,10 @@ import chatflowsService from '../chatflows' import { InternalFlowiseError } from '../../errors/internalFlowiseError' import { getErrorMessage } from '../../errors/utils' -const getSingleFlowConfig = async (chatflowId: string): Promise => { +const getSingleFlowConfig = async (chatflowId: string, workspaceId: string): Promise => { try { const 
appServer = getRunningExpressApp() - const chatflow = await chatflowsService.getChatflowById(chatflowId) + const chatflow = await chatflowsService.getChatflowById(chatflowId, workspaceId) if (!chatflow) { throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Chatflow ${chatflowId} not found in the database!`) } diff --git a/packages/server/src/services/log/index.ts b/packages/server/src/services/log/index.ts new file mode 100644 index 000000000..1f4227f61 --- /dev/null +++ b/packages/server/src/services/log/index.ts @@ -0,0 +1,94 @@ +import path from 'path' +import * as fs from 'fs' +import { StatusCodes } from 'http-status-codes' +import { InternalFlowiseError } from '../../errors/internalFlowiseError' +import { getErrorMessage } from '../../errors/utils' +import readline from 'readline' + +const readFile = (filePath: string) => { + return new Promise(function (resolve, reject) { + const lines: string[] = [] + var rl = readline.createInterface({ + input: fs.createReadStream(filePath) + }) + + rl.on('line', (line) => { + lines.push(line) + }) + + rl.on('close', () => { + // Add newlines to lines + resolve(lines.join('\n')) + }) + + rl.on('error', (error) => { + reject(`Error reading file ${filePath}: ${error}`) + }) + }) +} + +const generateDateRange = (startDate: string, endDate: string) => { + const start = startDate.split('-') + const end = endDate.split('-') + const startYear = parseInt(start[0], 10) + const startMonth = parseInt(start[1], 10) - 1 // JS months are 0-indexed + const startDay = parseInt(start[2], 10) + const startHour = parseInt(start[3], 10) + + const endYear = parseInt(end[0], 10) + const endMonth = parseInt(end[1], 10) - 1 + const endDay = parseInt(end[2], 10) + const endHour = parseInt(end[3], 10) + + const result = [] + const startTime = new Date(startYear, startMonth, startDay, startHour) + const endTime = new Date(endYear, endMonth, endDay, endHour) + + for (let time = startTime; time <= endTime; time.setHours(time.getHours() + 1)) { + 
const year = time.getFullYear() + const month = (time.getMonth() + 1).toString().padStart(2, '0') + const day = time.getDate().toString().padStart(2, '0') + const hour = time.getHours().toString().padStart(2, '0') + result.push(`${year}-${month}-${day}-${hour}`) + } + + return result +} + +const getLogs = async (startDate?: string, endDate?: string) => { + if (!startDate || !endDate) { + throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `Error: logService.getLogs - No start date or end date provided`) + } + + if (startDate > endDate) { + throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `Error: logService.getLogs - Start date is greater than end date`) + } + + try { + var promises = [] + const files = generateDateRange(startDate, endDate) + + for (let i = 0; i < files.length; i++) { + const date = files[i] + const filePath = process.env.LOG_PATH + ? path.resolve(process.env.LOG_PATH, `server.log.${date}`) + : path.join(__dirname, '..', '..', '..', 'logs', `server.log.${date}`) + if (fs.existsSync(filePath)) { + promises.push(readFile(filePath)) + } else { + // console.error(`File ${filePath} not found`) + } + + if (i === files.length - 1) { + const results = await Promise.all(promises) + return results + } + } + } catch (error) { + throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `Error: logService.getLogs - ${getErrorMessage(error)}`) + } +} + +export default { + getLogs +} diff --git a/packages/server/src/services/marketplaces/index.ts b/packages/server/src/services/marketplaces/index.ts index 33bc54920..ef50ad687 100644 --- a/packages/server/src/services/marketplaces/index.ts +++ b/packages/server/src/services/marketplaces/index.ts @@ -1,14 +1,15 @@ -import path from 'path' import * as fs from 'fs' import { StatusCodes } from 'http-status-codes' +import path from 'path' +import { DeleteResult } from 'typeorm' +import { v4 as uuidv4 } from 'uuid' +import { CustomTemplate } from 
'../../database/entities/CustomTemplate' +import { WorkspaceService } from '../../enterprise/services/workspace.service' +import { getWorkspaceSearchOptions } from '../../enterprise/utils/ControllerServiceUtils' import { InternalFlowiseError } from '../../errors/internalFlowiseError' import { getErrorMessage } from '../../errors/utils' import { IReactFlowEdge, IReactFlowNode } from '../../Interface' import { getRunningExpressApp } from '../../utils/getRunningExpressApp' -import { DeleteResult } from 'typeorm' -import { CustomTemplate } from '../../database/entities/CustomTemplate' -import { v4 as uuidv4 } from 'uuid' - import chatflowsService from '../chatflows' type ITemplate = { @@ -68,6 +69,8 @@ const getAllTemplates = async () => { templates.push(template) }) + /* + * Agentflow is deprecated marketplaceDir = path.join(__dirname, '..', '..', '..', 'marketplaces', 'agentflows') jsonsInDir = fs.readdirSync(marketplaceDir).filter((file) => path.extname(file) === '.json') jsonsInDir.forEach((file) => { @@ -86,7 +89,7 @@ const getAllTemplates = async () => { description: fileDataObj?.description || '' } templates.push(template) - }) + })*/ marketplaceDir = path.join(__dirname, '..', '..', '..', 'marketplaces', 'agentflowsv2') jsonsInDir = fs.readdirSync(marketplaceDir).filter((file) => path.extname(file) === '.json') @@ -107,11 +110,24 @@ const getAllTemplates = async () => { } templates.push(template) }) - const sortedTemplates = templates.sort((a, b) => a.templateName.localeCompare(b.templateName)) - const FlowiseDocsQnAIndex = sortedTemplates.findIndex((tmp) => tmp.templateName === 'Flowise Docs QnA') - if (FlowiseDocsQnAIndex > 0) { - sortedTemplates.unshift(sortedTemplates.splice(FlowiseDocsQnAIndex, 1)[0]) - } + const sortedTemplates = templates.sort((a, b) => { + // Prioritize AgentflowV2 templates first + if (a.type === 'AgentflowV2' && b.type !== 'AgentflowV2') { + return -1 + } + if (b.type === 'AgentflowV2' && a.type !== 'AgentflowV2') { + return 1 + } + 
// Put Tool templates last + if (a.type === 'Tool' && b.type !== 'Tool') { + return 1 + } + if (b.type === 'Tool' && a.type !== 'Tool') { + return -1 + } + // For same types, sort alphabetically by templateName + return a.templateName.localeCompare(b.templateName) + }) const dbResponse = sortedTemplates return dbResponse } catch (error) { @@ -122,10 +138,10 @@ const getAllTemplates = async () => { } } -const deleteCustomTemplate = async (templateId: string): Promise => { +const deleteCustomTemplate = async (templateId: string, workspaceId: string): Promise => { try { const appServer = getRunningExpressApp() - return await appServer.AppDataSource.getRepository(CustomTemplate).delete({ id: templateId }) + return await appServer.AppDataSource.getRepository(CustomTemplate).delete({ id: templateId, workspaceId: workspaceId }) } catch (error) { throw new InternalFlowiseError( StatusCodes.INTERNAL_SERVER_ERROR, @@ -134,30 +150,50 @@ const deleteCustomTemplate = async (templateId: string): Promise = } } -const getAllCustomTemplates = async (): Promise => { +const _modifyTemplates = (templates: any[]) => { + templates.map((template) => { + template.usecases = template.usecases ? JSON.parse(template.usecases) : '' + if (template.type === 'Tool') { + template.flowData = JSON.parse(template.flowData) + template.iconSrc = template.flowData.iconSrc + template.schema = template.flowData.schema + template.func = template.flowData.func + template.categories = [] + template.flowData = undefined + } else { + template.categories = getCategories(JSON.parse(template.flowData)) + } + if (!template.badge) { + template.badge = '' + } + if (!template.framework) { + template.framework = '' + } + }) +} + +const getAllCustomTemplates = async (workspaceId?: string): Promise => { try { const appServer = getRunningExpressApp() - const templates: any[] = await appServer.AppDataSource.getRepository(CustomTemplate).find() - templates.map((template) => { - template.usecases = template.usecases ? 
JSON.parse(template.usecases) : '' - if (template.type === 'Tool') { - template.flowData = JSON.parse(template.flowData) - template.iconSrc = template.flowData.iconSrc - template.schema = template.flowData.schema - template.func = template.flowData.func - template.categories = [] - template.flowData = undefined - } else { - template.categories = getCategories(JSON.parse(template.flowData)) + const templates: any[] = await appServer.AppDataSource.getRepository(CustomTemplate).findBy(getWorkspaceSearchOptions(workspaceId)) + const dbResponse = [] + _modifyTemplates(templates) + dbResponse.push(...templates) + // get shared credentials + if (workspaceId) { + const workspaceService = new WorkspaceService() + const sharedItems = (await workspaceService.getSharedItemsForWorkspace(workspaceId, 'custom_template')) as CustomTemplate[] + if (sharedItems && sharedItems.length) { + _modifyTemplates(sharedItems) + // add shared = true flag to all shared items, to differentiate them in the UI + sharedItems.forEach((sharedItem) => { + // @ts-ignore + sharedItem.shared = true + dbResponse.push(sharedItem) + }) } - if (!template.badge) { - template.badge = '' - } - if (!template.framework) { - template.framework = '' - } - }) - return templates + } + return dbResponse } catch (error) { throw new InternalFlowiseError( StatusCodes.INTERNAL_SERVER_ERROR, @@ -175,7 +211,7 @@ const saveCustomTemplate = async (body: any): Promise => { Object.assign(customTemplate, body) if (body.chatflowId) { - const chatflow = await chatflowsService.getChatflowById(body.chatflowId) + const chatflow = await chatflowsService.getChatflowById(body.chatflowId, body.workspaceId) const flowData = JSON.parse(chatflow.flowData) const { framework, exportJson } = _generateExportFlowData(flowData) flowDataStr = JSON.stringify(exportJson) diff --git a/packages/server/src/services/nodes/index.ts b/packages/server/src/services/nodes/index.ts index f0e8b3f53..fbe08368a 100644 --- 
a/packages/server/src/services/nodes/index.ts +++ b/packages/server/src/services/nodes/index.ts @@ -102,7 +102,9 @@ const getSingleNodeAsyncOptions = async (nodeName: string, requestBody: any): Pr databaseEntities: databaseEntities, componentNodes: appServer.nodesPool.componentNodes, previousNodes: requestBody.previousNodes, - currentNode: requestBody.currentNode + currentNode: requestBody.currentNode, + searchOptions: requestBody.searchOptions, + cachePool: appServer.cachePool }) return dbResponse @@ -121,20 +123,22 @@ const getSingleNodeAsyncOptions = async (nodeName: string, requestBody: any): Pr } // execute custom function node -const executeCustomFunction = async (requestBody: any) => { +const executeCustomFunction = async (requestBody: any, workspaceId?: string, orgId?: string) => { const appServer = getRunningExpressApp() const executeData = { appDataSource: appServer.AppDataSource, componentNodes: appServer.nodesPool.componentNodes, data: requestBody, - isExecuteCustomFunction: true + isExecuteCustomFunction: true, + orgId, + workspaceId } if (process.env.MODE === MODE.QUEUE) { const predictionQueue = appServer.queueManager.getQueue('prediction') const job = await predictionQueue.addJob(omit(executeData, OMIT_QUEUE_JOB_DATA)) - logger.debug(`[server]: Execute Custom Function Job added to queue: ${job.id}`) + logger.debug(`[server]: Execute Custom Function Job added to queue by ${orgId}: ${job.id}`) const queueEvents = predictionQueue.getQueueEvents() const result = await job.waitUntilFinished(queueEvents) diff --git a/packages/server/src/services/openai-realtime/index.ts b/packages/server/src/services/openai-realtime/index.ts index c1d9c6251..8877474bd 100644 --- a/packages/server/src/services/openai-realtime/index.ts +++ b/packages/server/src/services/openai-realtime/index.ts @@ -10,6 +10,7 @@ import { getStartingNodes, resolveVariables } from '../../utils' +import { checkStorage, updateStorageUsage } from '../../utils/quotaUsage' import { 
getRunningExpressApp } from '../../utils/getRunningExpressApp' import { ChatFlow } from '../../database/entities/ChatFlow' import { IDepthQueue, IReactFlowNode } from '../../Interface' @@ -17,9 +18,13 @@ import { ICommonObject, INodeData } from 'flowise-components' import { convertToOpenAIFunction } from '@langchain/core/utils/function_calling' import { v4 as uuidv4 } from 'uuid' import { Variable } from '../../database/entities/Variable' +import { getWorkspaceSearchOptions } from '../../enterprise/utils/ControllerServiceUtils' +import { Workspace } from '../../enterprise/database/entities/workspace.entity' +import { Organization } from '../../enterprise/database/entities/organization.entity' const SOURCE_DOCUMENTS_PREFIX = '\n\n----FLOWISE_SOURCE_DOCUMENTS----\n\n' const ARTIFACTS_PREFIX = '\n\n----FLOWISE_ARTIFACTS----\n\n' +const TOOL_ARGS_PREFIX = '\n\n----FLOWISE_TOOL_ARGS----\n\n' const buildAndInitTool = async (chatflowid: string, _chatId?: string, _apiMessageId?: string) => { const appServer = getRunningExpressApp() @@ -60,9 +65,30 @@ const buildAndInitTool = async (chatflowid: string, _chatId?: string, _apiMessag } startingNodeIds = [...new Set(startingNodeIds)] - const availableVariables = await appServer.AppDataSource.getRepository(Variable).find() + /*** Get API Config ***/ + const availableVariables = await appServer.AppDataSource.getRepository(Variable).findBy(getWorkspaceSearchOptions(chatflow.workspaceId)) const { nodeOverrides, variableOverrides, apiOverrideStatus } = getAPIOverrideConfig(chatflow) + // This can be public API, so we can only get orgId from the chatflow + const chatflowWorkspaceId = chatflow.workspaceId + const workspace = await appServer.AppDataSource.getRepository(Workspace).findOneBy({ + id: chatflowWorkspaceId + }) + if (!workspace) { + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Workspace ${chatflowWorkspaceId} not found`) + } + const workspaceId = workspace.id + + const org = await 
appServer.AppDataSource.getRepository(Organization).findOneBy({ + id: workspace.organizationId + }) + if (!org) { + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Organization ${workspace.organizationId} not found`) + } + + const orgId = org.id + const subscriptionId = org.subscriptionId + const reactFlowNodes = await buildFlow({ startingNodeIds, reactFlowNodes: nodes, @@ -77,10 +103,17 @@ const buildAndInitTool = async (chatflowid: string, _chatId?: string, _apiMessag chatflowid, apiMessageId, appDataSource: appServer.AppDataSource, + usageCacheManager: appServer.usageCacheManager, + cachePool: appServer.cachePool, apiOverrideStatus, nodeOverrides, availableVariables, - variableOverrides + variableOverrides, + orgId, + workspaceId, + subscriptionId, + updateStorageUsage, + checkStorage }) const nodeToExecute = @@ -113,6 +146,8 @@ const buildAndInitTool = async (chatflowid: string, _chatId?: string, _apiMessag const agent = await nodeInstance.init(nodeToExecuteData, '', { chatflowid, chatId, + orgId, + workspaceId, appDataSource: appServer.AppDataSource, databaseEntities, analytic: chatflow.analytic @@ -182,6 +217,11 @@ const executeAgentTool = async ( } } + if (typeof toolOutput === 'string' && toolOutput.includes(TOOL_ARGS_PREFIX)) { + const _splitted = toolOutput.split(TOOL_ARGS_PREFIX) + toolOutput = _splitted[0] + } + return { output: toolOutput, sourceDocuments, diff --git a/packages/server/src/services/predictions/index.ts b/packages/server/src/services/predictions/index.ts index 6f2dbe199..5d1d71ec0 100644 --- a/packages/server/src/services/predictions/index.ts +++ b/packages/server/src/services/predictions/index.ts @@ -4,9 +4,9 @@ import { utilBuildChatflow } from '../../utils/buildChatflow' import { InternalFlowiseError } from '../../errors/internalFlowiseError' import { getErrorMessage } from '../../errors/utils' -const buildChatflow = async (fullRequest: Request) => { +const buildChatflow = async (req: Request) => { try { - const dbResponse = 
await utilBuildChatflow(fullRequest) + const dbResponse = await utilBuildChatflow(req) return dbResponse } catch (error) { throw new InternalFlowiseError( diff --git a/packages/server/src/services/settings/index.ts b/packages/server/src/services/settings/index.ts new file mode 100644 index 000000000..18f73c813 --- /dev/null +++ b/packages/server/src/services/settings/index.ts @@ -0,0 +1,33 @@ +// TODO: add settings + +import { Platform } from '../../Interface' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' + +const getSettings = async () => { + try { + const appServer = getRunningExpressApp() + const platformType = appServer.identityManager.getPlatformType() + + switch (platformType) { + case Platform.ENTERPRISE: { + if (!appServer.identityManager.isLicenseValid()) { + return {} + } else { + return { PLATFORM_TYPE: Platform.ENTERPRISE } + } + } + case Platform.CLOUD: { + return { PLATFORM_TYPE: Platform.CLOUD } + } + default: { + return { PLATFORM_TYPE: Platform.OPEN_SOURCE } + } + } + } catch (error) { + return {} + } +} + +export default { + getSettings +} diff --git a/packages/server/src/services/stats/index.ts b/packages/server/src/services/stats/index.ts index 8b28c4f95..6ce6d4a10 100644 --- a/packages/server/src/services/stats/index.ts +++ b/packages/server/src/services/stats/index.ts @@ -14,7 +14,8 @@ const getChatflowStats = async ( endDate?: string, messageId?: string, feedback?: boolean, - feedbackTypes?: ChatMessageRatingType[] + feedbackTypes?: ChatMessageRatingType[], + activeWorkspaceId?: string ): Promise => { try { const chatmessages = (await utilGetChatMessage({ @@ -24,15 +25,20 @@ const getChatflowStats = async ( endDate, messageId, feedback, - feedbackTypes + feedbackTypes, + activeWorkspaceId })) as Array const totalMessages = chatmessages.length const totalFeedback = chatmessages.filter((message) => message?.feedback).length const positiveFeedback = chatmessages.filter((message) => message?.feedback?.rating === 
'THUMBS_UP').length + // count the number of unique sessions in the chatmessages - count unique sessionId + const uniqueSessions = new Set(chatmessages.map((message) => message.sessionId)) + const totalSessions = uniqueSessions.size const dbResponse = { totalMessages, totalFeedback, - positiveFeedback + positiveFeedback, + totalSessions } return dbResponse diff --git a/packages/server/src/services/text-to-speech/index.ts b/packages/server/src/services/text-to-speech/index.ts new file mode 100644 index 000000000..22a11ede7 --- /dev/null +++ b/packages/server/src/services/text-to-speech/index.ts @@ -0,0 +1,52 @@ +import { StatusCodes } from 'http-status-codes' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' +import { InternalFlowiseError } from '../../errors/internalFlowiseError' +import { getErrorMessage } from '../../errors/utils' +import { getVoices } from 'flowise-components' +import { databaseEntities } from '../../utils' + +export enum TextToSpeechProvider { + OPENAI = 'openai', + ELEVEN_LABS = 'elevenlabs' +} + +export interface TTSRequest { + text: string + provider: TextToSpeechProvider + credentialId: string + voice?: string + model?: string +} + +export interface TTSResponse { + audioBuffer: Buffer + contentType: string +} + +const getVoicesForProvider = async (provider: string, credentialId?: string): Promise => { + try { + if (!credentialId) { + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, 'Credential ID required for this provider') + } + + const appServer = getRunningExpressApp() + const options = { + orgId: '', + chatflowid: '', + chatId: '', + appDataSource: appServer.AppDataSource, + databaseEntities: databaseEntities + } + + return await getVoices(provider, credentialId, options) + } catch (error) { + throw new InternalFlowiseError( + StatusCodes.INTERNAL_SERVER_ERROR, + `Error: textToSpeechService.getVoices - ${getErrorMessage(error)}` + ) + } +} + +export default { + getVoices: getVoicesForProvider +} diff --git 
a/packages/server/src/services/tools/index.ts b/packages/server/src/services/tools/index.ts index 0dbf69b7f..57f7e06ed 100644 --- a/packages/server/src/services/tools/index.ts +++ b/packages/server/src/services/tools/index.ts @@ -1,25 +1,29 @@ import { StatusCodes } from 'http-status-codes' +import { QueryRunner } from 'typeorm' +import { validate } from 'uuid' import { Tool } from '../../database/entities/Tool' import { InternalFlowiseError } from '../../errors/internalFlowiseError' import { getErrorMessage } from '../../errors/utils' +import { FLOWISE_COUNTER_STATUS, FLOWISE_METRIC_COUNTERS } from '../../Interface.Metrics' import { getAppVersion } from '../../utils' import { getRunningExpressApp } from '../../utils/getRunningExpressApp' -import { FLOWISE_METRIC_COUNTERS, FLOWISE_COUNTER_STATUS } from '../../Interface.Metrics' -import { QueryRunner } from 'typeorm' -import { validate } from 'uuid' -const createTool = async (requestBody: any): Promise => { +const createTool = async (requestBody: any, orgId: string): Promise => { try { const appServer = getRunningExpressApp() const newTool = new Tool() Object.assign(newTool, requestBody) const tool = await appServer.AppDataSource.getRepository(Tool).create(newTool) const dbResponse = await appServer.AppDataSource.getRepository(Tool).save(tool) - await appServer.telemetry.sendTelemetry('tool_created', { - version: await getAppVersion(), - toolId: dbResponse.id, - toolName: dbResponse.name - }) + await appServer.telemetry.sendTelemetry( + 'tool_created', + { + version: await getAppVersion(), + toolId: dbResponse.id, + toolName: dbResponse.name + }, + orgId + ) appServer.metricsProvider?.incrementCounter(FLOWISE_METRIC_COUNTERS.TOOL_CREATED, { status: FLOWISE_COUNTER_STATUS.SUCCESS }) return dbResponse } catch (error) { @@ -27,11 +31,12 @@ const createTool = async (requestBody: any): Promise => { } } -const deleteTool = async (toolId: string): Promise => { +const deleteTool = async (toolId: string, workspaceId: 
string): Promise => { try { const appServer = getRunningExpressApp() const dbResponse = await appServer.AppDataSource.getRepository(Tool).delete({ - id: toolId + id: toolId, + workspaceId: workspaceId }) return dbResponse } catch (error) { @@ -39,21 +44,34 @@ const deleteTool = async (toolId: string): Promise => { } } -const getAllTools = async (): Promise => { +const getAllTools = async (workspaceId?: string, page: number = -1, limit: number = -1) => { try { const appServer = getRunningExpressApp() - const dbResponse = await appServer.AppDataSource.getRepository(Tool).find() - return dbResponse + const queryBuilder = appServer.AppDataSource.getRepository(Tool).createQueryBuilder('tool').orderBy('tool.updatedDate', 'DESC') + + if (page > 0 && limit > 0) { + queryBuilder.skip((page - 1) * limit) + queryBuilder.take(limit) + } + if (workspaceId) queryBuilder.andWhere('tool.workspaceId = :workspaceId', { workspaceId }) + const [data, total] = await queryBuilder.getManyAndCount() + + if (page > 0 && limit > 0) { + return { data, total } + } else { + return data + } } catch (error) { throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `Error: toolsService.getAllTools - ${getErrorMessage(error)}`) } } -const getToolById = async (toolId: string): Promise => { +const getToolById = async (toolId: string, workspaceId: string): Promise => { try { const appServer = getRunningExpressApp() const dbResponse = await appServer.AppDataSource.getRepository(Tool).findOneBy({ - id: toolId + id: toolId, + workspaceId: workspaceId }) if (!dbResponse) { throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Tool ${toolId} not found`) @@ -64,18 +82,19 @@ const getToolById = async (toolId: string): Promise => { } } -const updateTool = async (toolId: string, toolBody: any): Promise => { +const updateTool = async (toolId: string, toolBody: any, workspaceId: string): Promise => { try { const appServer = getRunningExpressApp() const tool = await 
appServer.AppDataSource.getRepository(Tool).findOneBy({ - id: toolId + id: toolId, + workspaceId: workspaceId }) if (!tool) { throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Tool ${toolId} not found`) } const updateTool = new Tool() Object.assign(updateTool, toolBody) - await appServer.AppDataSource.getRepository(Tool).merge(tool, updateTool) + appServer.AppDataSource.getRepository(Tool).merge(tool, updateTool) const dbResponse = await appServer.AppDataSource.getRepository(Tool).save(tool) return dbResponse } catch (error) { diff --git a/packages/server/src/services/validation/index.ts b/packages/server/src/services/validation/index.ts index 5ac4ea528..85cde6467 100644 --- a/packages/server/src/services/validation/index.ts +++ b/packages/server/src/services/validation/index.ts @@ -13,16 +13,18 @@ interface IValidationResult { issues: string[] } -const checkFlowValidation = async (flowId: string): Promise => { +const checkFlowValidation = async (flowId: string, workspaceId?: string): Promise => { try { const appServer = getRunningExpressApp() const componentNodes = appServer.nodesPool.componentNodes + // Create query conditions with workspace filtering if provided + const whereCondition: any = { id: flowId } + if (workspaceId) whereCondition.workspaceId = workspaceId + const flow = await appServer.AppDataSource.getRepository(ChatFlow).findOne({ - where: { - id: flowId - } + where: whereCondition }) if (!flow) { diff --git a/packages/server/src/services/variables/index.ts b/packages/server/src/services/variables/index.ts index d06e8c6c7..5b427e954 100644 --- a/packages/server/src/services/variables/index.ts +++ b/packages/server/src/services/variables/index.ts @@ -3,14 +3,26 @@ import { getRunningExpressApp } from '../../utils/getRunningExpressApp' import { Variable } from '../../database/entities/Variable' import { InternalFlowiseError } from '../../errors/internalFlowiseError' import { getErrorMessage } from '../../errors/utils' +import { getAppVersion } 
from '../../utils' import { QueryRunner } from 'typeorm' import { validate } from 'uuid' +import { Platform } from '../../Interface' -const createVariable = async (newVariable: Variable) => { +const createVariable = async (newVariable: Variable, orgId: string) => { + const appServer = getRunningExpressApp() + if (appServer.identityManager.getPlatformType() === Platform.CLOUD && newVariable.type === 'runtime') + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, 'Cloud platform does not support runtime variables!') try { - const appServer = getRunningExpressApp() const variable = await appServer.AppDataSource.getRepository(Variable).create(newVariable) const dbResponse = await appServer.AppDataSource.getRepository(Variable).save(variable) + await appServer.telemetry.sendTelemetry( + 'variable_created', + { + version: await getAppVersion(), + variableType: variable.type + }, + orgId + ) return dbResponse } catch (error) { throw new InternalFlowiseError( @@ -20,10 +32,10 @@ const createVariable = async (newVariable: Variable) => { } } -const deleteVariable = async (variableId: string): Promise => { +const deleteVariable = async (variableId: string, workspaceId: string): Promise => { try { const appServer = getRunningExpressApp() - const dbResponse = await appServer.AppDataSource.getRepository(Variable).delete({ id: variableId }) + const dbResponse = await appServer.AppDataSource.getRepository(Variable).delete({ id: variableId, workspaceId: workspaceId }) return dbResponse } catch (error) { throw new InternalFlowiseError( @@ -33,11 +45,30 @@ const deleteVariable = async (variableId: string): Promise => { } } -const getAllVariables = async () => { +const getAllVariables = async (workspaceId: string, page: number = -1, limit: number = -1) => { try { const appServer = getRunningExpressApp() - const dbResponse = await appServer.AppDataSource.getRepository(Variable).find() - return dbResponse + const queryBuilder = appServer.AppDataSource.getRepository(Variable) + 
.createQueryBuilder('variable') + .orderBy('variable.updatedDate', 'DESC') + + if (page > 0 && limit > 0) { + queryBuilder.skip((page - 1) * limit) + queryBuilder.take(limit) + } + if (workspaceId) queryBuilder.andWhere('variable.workspaceId = :workspaceId', { workspaceId }) + + if (appServer.identityManager.getPlatformType() === Platform.CLOUD) { + queryBuilder.andWhere('variable.type != :type', { type: 'runtime' }) + } + + const [data, total] = await queryBuilder.getManyAndCount() + + if (page > 0 && limit > 0) { + return { data, total } + } else { + return data + } } catch (error) { throw new InternalFlowiseError( StatusCodes.INTERNAL_SERVER_ERROR, @@ -46,12 +77,18 @@ const getAllVariables = async () => { } } -const getVariableById = async (variableId: string) => { +const getVariableById = async (variableId: string, workspaceId: string) => { try { const appServer = getRunningExpressApp() const dbResponse = await appServer.AppDataSource.getRepository(Variable).findOneBy({ - id: variableId + id: variableId, + workspaceId: workspaceId }) + + if (appServer.identityManager.getPlatformType() === Platform.CLOUD && dbResponse?.type === 'runtime') { + throw new InternalFlowiseError(StatusCodes.FORBIDDEN, 'Cloud platform does not support runtime variables!') + } + return dbResponse } catch (error) { throw new InternalFlowiseError( @@ -62,8 +99,10 @@ const getVariableById = async (variableId: string) => { } const updateVariable = async (variable: Variable, updatedVariable: Variable) => { + const appServer = getRunningExpressApp() + if (appServer.identityManager.getPlatformType() === Platform.CLOUD && updatedVariable.type === 'runtime') + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, 'Cloud platform does not support runtime variables!') try { - const appServer = getRunningExpressApp() const tmpUpdatedVariable = await appServer.AppDataSource.getRepository(Variable).merge(variable, updatedVariable) const dbResponse = await 
appServer.AppDataSource.getRepository(Variable).save(tmpUpdatedVariable) return dbResponse @@ -106,7 +145,7 @@ const importVariables = async (newVariables: Partial[], queryRunner?: }) // step 3 - remove ids that are only duplicate - const prepVariables: Partial[] = newVariables.map((newVariable) => { + let prepVariables: Partial[] = newVariables.map((newVariable) => { let id: string = '' if (newVariable.id) id = newVariable.id if (foundIds.includes(id)) { @@ -116,6 +155,10 @@ const importVariables = async (newVariables: Partial[], queryRunner?: return newVariable }) + // Filter out variables with type "runtime" + if (appServer.identityManager.getPlatformType() === Platform.CLOUD) + prepVariables = prepVariables.filter((variable) => variable.type !== 'runtime') + // step 4 - transactional insert array of entities const insertResponse = await repository.insert(prepVariables) diff --git a/packages/server/src/utils/SSEStreamer.ts b/packages/server/src/utils/SSEStreamer.ts index 2b950579c..132b37ad1 100644 --- a/packages/server/src/utils/SSEStreamer.ts +++ b/packages/server/src/utils/SSEStreamer.ts @@ -257,4 +257,50 @@ export class SSEStreamer implements IServerSideEventStreamer { client.response.write('message:\ndata:' + JSON.stringify(clientResponse) + '\n\n') } } + + streamTTSStartEvent(chatId: string, chatMessageId: string, format: string): void { + const client = this.clients[chatId] + if (client) { + const clientResponse = { + event: 'tts_start', + data: { chatMessageId, format } + } + client.response.write('message:\ndata:' + JSON.stringify(clientResponse) + '\n\n') + } + } + + streamTTSDataEvent(chatId: string, chatMessageId: string, audioChunk: string): void { + const client = this.clients[chatId] + if (client) { + const clientResponse = { + event: 'tts_data', + data: { chatMessageId, audioChunk } + } + client.response.write('message:\ndata:' + JSON.stringify(clientResponse) + '\n\n') + } + } + + streamTTSEndEvent(chatId: string, chatMessageId: string): void { 
+ const client = this.clients[chatId] + if (client) { + const clientResponse = { + event: 'tts_end', + data: { chatMessageId } + } + client.response.write('message:\ndata:' + JSON.stringify(clientResponse) + '\n\n') + } + } + + streamTTSAbortEvent(chatId: string, chatMessageId: string): void { + const client = this.clients[chatId] + if (client) { + const clientResponse = { + event: 'tts_abort', + data: { chatMessageId } + } + client.response.write('message:\ndata:' + JSON.stringify(clientResponse) + '\n\n') + client.response.end() + delete this.clients[chatId] + } + } } diff --git a/packages/server/src/utils/XSS.ts b/packages/server/src/utils/XSS.ts index 96bbab573..f7c446869 100644 --- a/packages/server/src/utils/XSS.ts +++ b/packages/server/src/utils/XSS.ts @@ -1,5 +1,6 @@ import { Request, Response, NextFunction } from 'express' import sanitizeHtml from 'sanitize-html' +import { isPredictionRequest, extractChatflowId, validateChatflowDomain } from './domainValidation' export function sanitizeMiddleware(req: Request, res: Response, next: NextFunction): void { // decoding is necessary as the url is encoded by the browser @@ -20,22 +21,60 @@ export function sanitizeMiddleware(req: Request, res: Response, next: NextFuncti } export function getAllowedCorsOrigins(): string { - // Expects FQDN separated by commas, otherwise nothing or * for all. - return process.env.CORS_ORIGINS ?? '*' + // Expects FQDN separated by commas, otherwise nothing. + return process.env.CORS_ORIGINS ?? 
'' +} + +function parseAllowedOrigins(allowedOrigins: string): string[] { + if (!allowedOrigins) { + return [] + } + if (allowedOrigins === '*') { + return ['*'] + } + return allowedOrigins + .split(',') + .map((origin) => origin.trim().toLowerCase()) + .filter((origin) => origin.length > 0) } export function getCorsOptions(): any { - const corsOptions = { - origin: function (origin: string | undefined, callback: (err: Error | null, allow?: boolean) => void) { - const allowedOrigins = getAllowedCorsOrigins() - if (!origin || allowedOrigins == '*' || allowedOrigins.indexOf(origin) !== -1) { - callback(null, true) - } else { - callback(null, false) + return (req: any, callback: (err: Error | null, options?: any) => void) => { + const corsOptions = { + origin: async (origin: string | undefined, originCallback: (err: Error | null, allow?: boolean) => void) => { + const allowedOrigins = getAllowedCorsOrigins() + const isPredictionReq = isPredictionRequest(req.url) + const allowedList = parseAllowedOrigins(allowedOrigins) + const originLc = origin?.toLowerCase() + + // Always allow no-Origin requests (same-origin, server-to-server) + if (!originLc) return originCallback(null, true) + + // Global allow: '*' or exact match + const globallyAllowed = allowedOrigins === '*' || allowedList.includes(originLc) + + if (isPredictionReq) { + // Per-chatflow allowlist OR globally allowed + const chatflowId = extractChatflowId(req.url) + let chatflowAllowed = false + if (chatflowId) { + try { + chatflowAllowed = await validateChatflowDomain(chatflowId, originLc, req.user?.activeWorkspaceId) + } catch (error) { + // Log error and deny on failure + console.error('Domain validation error:', error) + chatflowAllowed = false + } + } + return originCallback(null, globallyAllowed || chatflowAllowed) + } + + // Non-prediction: rely on global policy only + return originCallback(null, globallyAllowed) } } + callback(null, corsOptions) } - return corsOptions } export function 
getAllowedIframeOrigins(): string { diff --git a/packages/server/src/utils/addChatflowsCount.ts b/packages/server/src/utils/addChatflowsCount.ts index ede05121c..3b8d4c069 100644 --- a/packages/server/src/utils/addChatflowsCount.ts +++ b/packages/server/src/utils/addChatflowsCount.ts @@ -15,6 +15,7 @@ export const addChatflowsCount = async (keys: any) => { const chatflows = await appServer.AppDataSource.getRepository(ChatFlow) .createQueryBuilder('cf') .where('cf.apikeyid = :apikeyid', { apikeyid: key.id }) + .andWhere('cf.workspaceId = :workspaceId', { workspaceId: key.workspaceId }) .getMany() const linkedChatFlows: any[] = [] chatflows.map((cf) => { diff --git a/packages/server/src/utils/apiKey.ts b/packages/server/src/utils/apiKey.ts index a50b2b54a..9aa5daa9b 100644 --- a/packages/server/src/utils/apiKey.ts +++ b/packages/server/src/utils/apiKey.ts @@ -1,10 +1,14 @@ import { randomBytes, scryptSync, timingSafeEqual } from 'crypto' import { ICommonObject } from 'flowise-components' -import moment from 'moment' import fs from 'fs' import path from 'path' -import logger from './logger' -import { appConfig } from '../AppConfig' +import { DataSource } from 'typeorm' +import { ApiKey } from '../database/entities/ApiKey' +import { Workspace } from '../enterprise/database/entities/workspace.entity' +import { v4 as uuidv4 } from 'uuid' +import { ChatFlow } from '../database/entities/ChatFlow' +import { addChatflowsCount } from './addChatflowsCount' +import { Platform } from '../Interface' /** * Returns the api key path @@ -51,94 +55,14 @@ export const compareKeys = (storedKey: string, suppliedKey: string): boolean => * @returns {Promise} */ export const getAPIKeys = async (): Promise => { - if (appConfig.apiKeys.storageType !== 'json') { - return [] - } try { const content = await fs.promises.readFile(getAPIKeyPath(), 'utf8') return JSON.parse(content) } catch (error) { - const keyName = 'DefaultKey' - const apiKey = generateAPIKey() - const apiSecret = 
generateSecretHash(apiKey) - const content = [ - { - keyName, - apiKey, - apiSecret, - createdAt: moment().format('DD-MMM-YY'), - id: randomBytes(16).toString('hex') - } - ] - await fs.promises.writeFile(getAPIKeyPath(), JSON.stringify(content), 'utf8') - return content + return [] } } -/** - * Add new API key - * @param {string} keyName - * @returns {Promise} - */ -export const addAPIKey = async (keyName: string): Promise => { - const existingAPIKeys = await getAPIKeys() - const apiKey = generateAPIKey() - const apiSecret = generateSecretHash(apiKey) - const content = [ - ...existingAPIKeys, - { - keyName, - apiKey, - apiSecret, - createdAt: moment().format('DD-MMM-YY'), - id: randomBytes(16).toString('hex') - } - ] - await fs.promises.writeFile(getAPIKeyPath(), JSON.stringify(content), 'utf8') - return content -} - -/** - * import API keys - * @param {[]} keys - * @returns {Promise} - */ -export const importKeys = async (keys: any[], importMode: string): Promise => { - const allApiKeys = await getAPIKeys() - // if importMode is errorIfExist, check for existing keys and raise error before any modification to the file - if (importMode === 'errorIfExist') { - for (const key of keys) { - const keyNameExists = allApiKeys.find((k) => k.keyName === key.keyName) - if (keyNameExists) { - throw new Error(`Key with name ${key.keyName} already exists`) - } - } - } - for (const key of keys) { - // Check if keyName already exists, if overwrite is false, raise an error else overwrite the key - const keyNameExists = allApiKeys.find((k) => k.keyName === key.keyName) - if (keyNameExists) { - const keyIndex = allApiKeys.findIndex((k) => k.keyName === key.keyName) - switch (importMode) { - case 'overwriteIfExist': - allApiKeys[keyIndex] = key - continue - case 'ignoreIfExist': - // ignore this key and continue - continue - case 'errorIfExist': - // should not reach here as we have already checked for existing keys - throw new Error(`Key with name ${key.keyName} already exists`) - 
default: - throw new Error(`Unknown overwrite option ${importMode}`) - } - } - allApiKeys.push(key) - } - await fs.promises.writeFile(getAPIKeyPath(), JSON.stringify(allApiKeys), 'utf8') - return allApiKeys -} - /** * Get API Key details * @param {string} apiKey @@ -151,42 +75,82 @@ export const getApiKey = async (apiKey: string) => { return existingAPIKeys[keyIndex] } -/** - * Update existing API key - * @param {string} keyIdToUpdate - * @param {string} newKeyName - * @returns {Promise} - */ -export const updateAPIKey = async (keyIdToUpdate: string, newKeyName: string): Promise => { - const existingAPIKeys = await getAPIKeys() - const keyIndex = existingAPIKeys.findIndex((key) => key.id === keyIdToUpdate) - if (keyIndex < 0) return [] - existingAPIKeys[keyIndex].keyName = newKeyName - await fs.promises.writeFile(getAPIKeyPath(), JSON.stringify(existingAPIKeys), 'utf8') - return existingAPIKeys -} +export const migrateApiKeysFromJsonToDb = async (appDataSource: DataSource, platformType: Platform) => { + if (platformType === Platform.CLOUD) { + return + } -/** - * Delete API key - * @param {string} keyIdToDelete - * @returns {Promise} - */ -export const deleteAPIKey = async (keyIdToDelete: string): Promise => { - const existingAPIKeys = await getAPIKeys() - const result = existingAPIKeys.filter((key) => key.id !== keyIdToDelete) - await fs.promises.writeFile(getAPIKeyPath(), JSON.stringify(result), 'utf8') - return result -} + if (!process.env.APIKEY_STORAGE_TYPE || process.env.APIKEY_STORAGE_TYPE === 'json') { + const keys = await getAPIKeys() + if (keys.length > 0) { + try { + // Get all available workspaces + const workspaces = await appDataSource.getRepository(Workspace).find() -/** - * Replace all api keys - * @param {ICommonObject[]} content - * @returns {Promise} - */ -export const replaceAllAPIKeys = async (content: ICommonObject[]): Promise => { - try { - await fs.promises.writeFile(getAPIKeyPath(), JSON.stringify(content), 'utf8') - } catch (error) { - 
logger.error(error) + for (const key of keys) { + const existingKey = await appDataSource.getRepository(ApiKey).findOneBy({ + apiKey: key.apiKey + }) + + // Only add if key doesn't already exist in DB + if (!existingKey) { + // Create a new API key for each workspace + if (workspaces.length > 0) { + for (const workspace of workspaces) { + const newKey = new ApiKey() + newKey.id = uuidv4() + newKey.apiKey = key.apiKey + newKey.apiSecret = key.apiSecret + newKey.keyName = key.keyName + newKey.workspaceId = workspace.id + + const keyEntity = appDataSource.getRepository(ApiKey).create(newKey) + await appDataSource.getRepository(ApiKey).save(keyEntity) + + const chatflows = await appDataSource.getRepository(ChatFlow).findBy({ + apikeyid: key.id, + workspaceId: workspace.id + }) + + for (const chatflow of chatflows) { + chatflow.apikeyid = newKey.id + await appDataSource.getRepository(ChatFlow).save(chatflow) + } + + await addChatflowsCount(chatflows) + } + } else { + // If no workspaces exist, create the key without a workspace ID and later will be updated by setNullWorkspaceId + const newKey = new ApiKey() + newKey.id = uuidv4() + newKey.apiKey = key.apiKey + newKey.apiSecret = key.apiSecret + newKey.keyName = key.keyName + + const keyEntity = appDataSource.getRepository(ApiKey).create(newKey) + await appDataSource.getRepository(ApiKey).save(keyEntity) + + const chatflows = await appDataSource.getRepository(ChatFlow).findBy({ + apikeyid: key.id + }) + + for (const chatflow of chatflows) { + chatflow.apikeyid = newKey.id + await appDataSource.getRepository(ChatFlow).save(chatflow) + } + + await addChatflowsCount(chatflows) + } + } + } + + // Delete the JSON file + if (fs.existsSync(getAPIKeyPath())) { + fs.unlinkSync(getAPIKeyPath()) + } + } catch (error) { + console.error('Error migrating API keys from JSON to DB', error) + } + } } } diff --git a/packages/server/src/utils/buildAgentGraph.ts b/packages/server/src/utils/buildAgentGraph.ts index 9097a3e2b..314891709 
100644 --- a/packages/server/src/utils/buildAgentGraph.ts +++ b/packages/server/src/utils/buildAgentGraph.ts @@ -26,6 +26,7 @@ import { InternalFlowiseError } from '../errors/internalFlowiseError' import { getErrorMessage } from '../errors/utils' import logger from './logger' import { Variable } from '../database/entities/Variable' +import { getWorkspaceSearchOptions } from '../enterprise/utils/ControllerServiceUtils' import { DataSource } from 'typeorm' import { CachePool } from '../CachePool' @@ -50,7 +51,9 @@ export const buildAgentGraph = async ({ shouldStreamResponse, cachePool, baseURL, - signal + signal, + orgId, + workspaceId }: { agentflow: IChatFlow flowConfig: IFlowConfig @@ -70,6 +73,8 @@ export const buildAgentGraph = async ({ cachePool: CachePool baseURL: string signal?: AbortController + orgId: string + workspaceId?: string }): Promise => { try { const chatflowid = flowConfig.chatflowid @@ -79,9 +84,12 @@ export const buildAgentGraph = async ({ const uploads = incomingInput.uploads const options = { + orgId, + workspaceId, chatId, sessionId, chatflowid, + chatflowId: chatflowid, logger, analytic, appDataSource, @@ -384,7 +392,7 @@ export const buildAgentGraph = async ({ } } catch (e) { // clear agent memory because checkpoints were saved during runtime - await clearSessionMemory(nodes, componentNodes, chatId, appDataSource, sessionId) + await clearSessionMemory(nodes, componentNodes, chatId, appDataSource, orgId, sessionId) if (getErrorMessage(e).includes('Aborted')) { if (shouldStreamResponse && sseStreamer) { sseStreamer.streamAbortEvent(chatId) @@ -395,7 +403,7 @@ export const buildAgentGraph = async ({ } return streamResults } catch (e) { - logger.error('[server]: Error:', e) + logger.error(`[server]: [${orgId}]: Error:`, e) throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `Error buildAgentGraph - ${getErrorMessage(e)}`) } } @@ -457,7 +465,7 @@ const compileMultiAgentsGraph = async (params: MultiAgentsGraphParams) => { const 
workerNodes = reactFlowNodes.filter((node) => workerNodeIds.includes(node.data.id)) /*** Get API Config ***/ - const availableVariables = await appDataSource.getRepository(Variable).find() + const availableVariables = await appDataSource.getRepository(Variable).findBy(getWorkspaceSearchOptions(agentflow.workspaceId)) const { nodeOverrides, variableOverrides, apiOverrideStatus } = getAPIOverrideConfig(agentflow) let supervisorWorkers: { [key: string]: IMultiAgentNode[] } = {} @@ -566,7 +574,7 @@ const compileMultiAgentsGraph = async (params: MultiAgentsGraphParams) => { const graph = workflowGraph.compile({ checkpointer: memory }) - const loggerHandler = new ConsoleCallbackHandler(logger) + const loggerHandler = new ConsoleCallbackHandler(logger, options?.orgId) const callbacks = await additionalCallbacks(flowNodeData, options) const config = { configurable: { thread_id: threadId } } @@ -686,7 +694,7 @@ const compileSeqAgentsGraph = async (params: SeqAgentsGraphParams) => { let interruptToolNodeNames = [] /*** Get API Config ***/ - const availableVariables = await appDataSource.getRepository(Variable).find() + const availableVariables = await appDataSource.getRepository(Variable).findBy(getWorkspaceSearchOptions(agentflow.workspaceId)) const { nodeOverrides, variableOverrides, apiOverrideStatus } = getAPIOverrideConfig(agentflow) const initiateNode = async (node: IReactFlowNode) => { @@ -996,7 +1004,7 @@ const compileSeqAgentsGraph = async (params: SeqAgentsGraphParams) => { interruptBefore: interruptToolNodeNames as any }) - const loggerHandler = new ConsoleCallbackHandler(logger) + const loggerHandler = new ConsoleCallbackHandler(logger, options?.orgId) const callbacks = await additionalCallbacks(flowNodeData as any, options) const config = { configurable: { thread_id: threadId }, bindModel } @@ -1044,7 +1052,7 @@ const compileSeqAgentsGraph = async (params: SeqAgentsGraphParams) => { configurable: config }) } catch (e) { - logger.error('Error compile graph', e) + 
logger.error(`[${options.orgId}]: Error compile graph`, e) throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `Error compile graph - ${getErrorMessage(e)}`) } } diff --git a/packages/server/src/utils/buildAgentflow.ts b/packages/server/src/utils/buildAgentflow.ts index 28c117057..5144eacd5 100644 --- a/packages/server/src/utils/buildAgentflow.ts +++ b/packages/server/src/utils/buildAgentflow.ts @@ -40,7 +40,11 @@ import { getGlobalVariable, getStartingNode, getTelemetryFlowObj, - QUESTION_VAR_PREFIX + QUESTION_VAR_PREFIX, + CURRENT_DATE_TIME_VAR_PREFIX, + _removeCredentialId, + validateHistorySchema, + LOOP_COUNT_VAR_PREFIX } from '.' import { ChatFlow } from '../database/entities/ChatFlow' import { Variable } from '../database/entities/Variable' @@ -52,6 +56,9 @@ import { utilAddChatMessage } from './addChatMesage' import { CachePool } from '../CachePool' import { ChatMessage } from '../database/entities/ChatMessage' import { Telemetry } from './telemetry' +import { getWorkspaceSearchOptions } from '../enterprise/utils/ControllerServiceUtils' +import { UsageCacheManager } from '../UsageCacheManager' +import { generateTTSForResponseStream, shouldAutoPlayTTS } from './buildChatflow' interface IWaitingNode { nodeId: string @@ -79,6 +86,8 @@ interface IProcessNodeOutputsParams { waitingNodes: Map loopCounts: Map abortController?: AbortController + sseStreamer?: IServerSideEventStreamer + chatId: string } interface IAgentFlowRuntime { @@ -99,9 +108,12 @@ interface IExecuteNodeParams { chatId: string sessionId: string apiMessageId: string + evaluationRunId?: string isInternal: boolean pastChatHistory: IMessage[] + prependedChatHistory: IMessage[] appDataSource: DataSource + usageCacheManager: UsageCacheManager telemetry: Telemetry componentNodes: IComponentNodes cachePool: CachePool @@ -122,6 +134,11 @@ interface IExecuteNodeParams { parentExecutionId?: string isRecursive?: boolean iterationContext?: ICommonObject + loopCounts?: Map + orgId: string + 
workspaceId: string + subscriptionId: string + productId: string } interface IExecuteAgentFlowParams extends Omit { @@ -142,13 +159,15 @@ const addExecution = async ( appDataSource: DataSource, agentflowId: string, agentFlowExecutedData: IAgentflowExecutedData[], - sessionId: string + sessionId: string, + workspaceId: string ) => { const newExecution = new Execution() const bodyExecution = { agentflowId, state: 'INPROGRESS', sessionId, + workspaceId, executionData: JSON.stringify(agentFlowExecutedData) } Object.assign(newExecution, bodyExecution) @@ -164,9 +183,10 @@ const addExecution = async ( * @param {Partial} data * @returns {Promise} */ -const updateExecution = async (appDataSource: DataSource, executionId: string, data?: Partial) => { +const updateExecution = async (appDataSource: DataSource, executionId: string, workspaceId: string, data?: Partial) => { const execution = await appDataSource.getRepository(Execution).findOneBy({ - id: executionId + id: executionId, + workspaceId }) if (!execution) { @@ -192,21 +212,6 @@ const updateExecution = async (appDataSource: DataSource, executionId: string, d await appDataSource.getRepository(Execution).save(execution) } -export const _removeCredentialId = (obj: any): any => { - if (!obj || typeof obj !== 'object') return obj - - if (Array.isArray(obj)) { - return obj.map((item) => _removeCredentialId(item)) - } - - const newObj: Record = {} - for (const [key, value] of Object.entries(obj)) { - if (key === 'FLOWISE_CREDENTIAL_ID') continue - newObj[key] = _removeCredentialId(value) - } - return newObj -} - export const resolveVariables = async ( reactFlowNodeData: INodeData, question: string, @@ -216,8 +221,10 @@ export const resolveVariables = async ( variableOverrides: IVariableOverride[], uploadedFilesContent: string, chatHistory: IMessage[], + componentNodes: IComponentNodes, agentFlowExecutedData?: IAgentflowExecutedData[], - iterationContext?: ICommonObject + iterationContext?: ICommonObject, + loopCounts?: Map 
): Promise => { let flowNodeData = cloneDeep(reactFlowNodeData) const types = 'inputs' @@ -284,9 +291,32 @@ export const resolveVariables = async ( resolvedValue = resolvedValue.replace(match, flowConfig?.runtimeChatHistoryLength ?? 0) } + if (variableFullPath === LOOP_COUNT_VAR_PREFIX) { + // Get the current loop count from the most recent loopAgentflow node execution + let currentLoopCount = 0 + if (loopCounts && agentFlowExecutedData) { + // Find the most recent loopAgentflow node execution to get its loop count + const loopNodes = [...agentFlowExecutedData].reverse().filter((data) => data.data?.name === 'loopAgentflow') + if (loopNodes.length > 0) { + const latestLoopNode = loopNodes[0] + currentLoopCount = loopCounts.get(latestLoopNode.nodeId) || 0 + } + } + resolvedValue = resolvedValue.replace(match, currentLoopCount.toString()) + } + + if (variableFullPath === CURRENT_DATE_TIME_VAR_PREFIX) { + resolvedValue = resolvedValue.replace(match, new Date().toISOString()) + } + if (variableFullPath.startsWith('$iteration')) { if (iterationContext && iterationContext.value) { - if (typeof iterationContext.value === 'string') { + if (variableFullPath === '$iteration') { + // If it's exactly $iteration, stringify the entire value + const formattedValue = + typeof iterationContext.value === 'object' ? 
JSON.stringify(iterationContext.value) : iterationContext.value + resolvedValue = resolvedValue.replace(match, formattedValue) + } else if (typeof iterationContext.value === 'string') { resolvedValue = resolvedValue.replace(match, iterationContext?.value) } else if (typeof iterationContext.value === 'object') { const iterationValue = get(iterationContext.value, variableFullPath.replace('$iteration.', '')) @@ -325,6 +355,38 @@ export const resolveVariables = async ( } } + // Check if the variable is an output reference like `nodeId.output.path` + const outputMatch = variableFullPath.match(/^(.*?)\.output\.(.+)$/) + if (outputMatch && agentFlowExecutedData) { + // Extract nodeId and outputPath from the match + const [, nodeIdPart, outputPath] = outputMatch + // Clean nodeId (handle escaped underscores) + const cleanNodeId = nodeIdPart.replace('\\', '') + + // Find the last (most recent) matching node data instead of the first one + const nodeData = [...agentFlowExecutedData].reverse().find((d) => d.nodeId === cleanNodeId) + + if (nodeData?.data?.output && outputPath.trim()) { + const variableValue = get(nodeData.data.output, outputPath) + if (variableValue !== undefined) { + // Replace the reference with actual value + const formattedValue = + Array.isArray(variableValue) || (typeof variableValue === 'object' && variableValue !== null) + ? 
JSON.stringify(variableValue) + : variableValue + // If the resolved value is exactly the match, replace it directly + if (resolvedValue === match) { + resolvedValue = formattedValue + } else { + // Otherwise do a standard stringโ€replace + resolvedValue = String(resolvedValue).replace(match, String(formattedValue)) + } + // Skip fallback logic + continue + } + } + } + // Find node data in executed data // sometimes turndown value returns a backslash like `llmAgentflow\_1`, remove the backslash const cleanNodeId = variableFullPath.replace('\\', '') @@ -334,7 +396,8 @@ export const resolveVariables = async ( : undefined if (nodeData && nodeData.data) { // Replace the reference with actual value - const actualValue = (nodeData.data['output'] as ICommonObject)?.content + const nodeOutput = nodeData.data['output'] as ICommonObject + const actualValue = nodeOutput?.content ?? nodeOutput?.http?.data // For arrays and objects, stringify them to prevent toString() conversion issues const formattedValue = Array.isArray(actualValue) || (typeof actualValue === 'object' && actualValue !== null) @@ -348,6 +411,175 @@ export const resolveVariables = async ( } const getParamValues = async (paramsObj: ICommonObject) => { + /* + * EXAMPLE SCENARIO: + * + * 1. Agent node has inputParam: { name: "agentTools", type: "array", array: [{ name: "agentSelectedTool", loadConfig: true }] } + * 2. Inputs contain: { agentTools: [{ agentSelectedTool: "requestsGet", agentSelectedToolConfig: { requestsGetHeaders: "Bearer {{ $vars.TOKEN }}" } }] } + * 3. 
We need to resolve the variable in requestsGetHeaders because RequestsGet node defines requestsGetHeaders with acceptVariable: true + * + * STEP 1: Find all parameters with loadConfig=true (e.g., "agentSelectedTool") + * STEP 2: Find their values in inputs (e.g., "requestsGet") + * STEP 3: Look up component node definition for "requestsGet" + * STEP 4: Find which of its parameters have acceptVariable=true (e.g., "requestsGetHeaders") + * STEP 5: Find the config object (e.g., "agentSelectedToolConfig") + * STEP 6: Resolve variables in config parameters that accept variables + */ + + // Helper function to find params with loadConfig recursively + // Example: Finds ["agentModel", "agentSelectedTool"] from the inputParams structure + const findParamsWithLoadConfig = (inputParams: any[]): string[] => { + const paramsWithLoadConfig: string[] = [] + + for (const param of inputParams) { + // Direct loadConfig param (e.g., agentModel with loadConfig: true) + if (param.loadConfig === true) { + paramsWithLoadConfig.push(param.name) + } + + // Check nested array parameters (e.g., agentTools.array contains agentSelectedTool with loadConfig: true) + if (param.type === 'array' && param.array && Array.isArray(param.array)) { + const nestedParams = findParamsWithLoadConfig(param.array) + paramsWithLoadConfig.push(...nestedParams) + } + } + + return paramsWithLoadConfig + } + + // Helper function to find value of a parameter recursively in nested objects/arrays + // Example: Searches for "agentSelectedTool" value in complex nested inputs structure + // Returns "requestsGet" when found in agentTools[0].agentSelectedTool + const findParamValue = (obj: any, paramName: string): any => { + if (typeof obj !== 'object' || obj === null) { + return undefined + } + + // Handle arrays (e.g., agentTools array) + if (Array.isArray(obj)) { + for (const item of obj) { + const result = findParamValue(item, paramName) + if (result !== undefined) { + return result + } + } + return undefined + } + + 
// Direct property match + if (Object.prototype.hasOwnProperty.call(obj, paramName)) { + return obj[paramName] + } + + // Recursively search nested objects + for (const value of Object.values(obj)) { + const result = findParamValue(value, paramName) + if (result !== undefined) { + return result + } + } + + return undefined + } + + // Helper function to process config parameters with acceptVariable + // Example: Processes agentSelectedToolConfig object, resolving variables in requestsGetHeaders + const processConfigParams = async (configObj: any, configParamWithAcceptVariables: string[]) => { + if (typeof configObj !== 'object' || configObj === null) { + return + } + + // Handle arrays of config objects + if (Array.isArray(configObj)) { + for (const item of configObj) { + await processConfigParams(item, configParamWithAcceptVariables) + } + return + } + + for (const [key, value] of Object.entries(configObj)) { + // Only resolve variables for parameters that accept them + // Example: requestsGetHeaders is in configParamWithAcceptVariables, so resolve "Bearer {{ $vars.TOKEN }}" + if (configParamWithAcceptVariables.includes(key)) { + configObj[key] = await resolveNodeReference(value) + } + } + } + + // STEP 1: Get all params with loadConfig from inputParams + // Example result: ["agentModel", "agentSelectedTool"] + const paramsWithLoadConfig = findParamsWithLoadConfig(reactFlowNodeData.inputParams) + + // STEP 2-6: Process each param with loadConfig + for (const paramWithLoadConfig of paramsWithLoadConfig) { + // STEP 2: Find the value of this parameter in the inputs + // Example: paramWithLoadConfig="agentSelectedTool", paramValue="requestsGet" + const paramValue = findParamValue(paramsObj, paramWithLoadConfig) + + if (paramValue && componentNodes[paramValue]) { + // STEP 3: Get the node instance inputs to find params with acceptVariable + // Example: componentNodes["requestsGet"] contains the RequestsGet node definition + const nodeInstance = 
componentNodes[paramValue] + const configParamWithAcceptVariables: string[] = [] + + // STEP 4: Find which parameters of the component accept variables + // Example: RequestsGet has inputs like { name: "requestsGetHeaders", acceptVariable: true } + if (nodeInstance.inputs && Array.isArray(nodeInstance.inputs)) { + for (const input of nodeInstance.inputs) { + if (input.acceptVariable === true) { + configParamWithAcceptVariables.push(input.name) + } + } + } + // Example result: configParamWithAcceptVariables = ["requestsGetHeaders", "requestsGetUrl", ...] + + // STEP 5: Look for the config object (paramName + "Config") + // Example: Look for "agentSelectedToolConfig" in the inputs + const configParamName = paramWithLoadConfig + 'Config' + + // Find all config values (handle arrays) + const findAllConfigValues = (obj: any, paramName: string): any[] => { + const results: any[] = [] + + if (typeof obj !== 'object' || obj === null) { + return results + } + + // Handle arrays (e.g., agentTools array) + if (Array.isArray(obj)) { + for (const item of obj) { + results.push(...findAllConfigValues(item, paramName)) + } + return results + } + + // Direct property match + if (Object.prototype.hasOwnProperty.call(obj, paramName)) { + results.push(obj[paramName]) + } + + // Recursively search nested objects + for (const value of Object.values(obj)) { + results.push(...findAllConfigValues(value, paramName)) + } + + return results + } + + const configValues = findAllConfigValues(paramsObj, configParamName) + + // STEP 6: Process all config objects to resolve variables + // Example: Resolve "Bearer {{ $vars.TOKEN }}" in requestsGetHeaders + if (configValues.length > 0 && configParamWithAcceptVariables.length > 0) { + for (const configValue of configValues) { + await processConfigParams(configValue, configParamWithAcceptVariables) + } + } + } + } + + // Original logic for direct acceptVariable params (maintains backward compatibility) + // Example: Direct params like agentUserMessage 
with acceptVariable: true for (const key in paramsObj) { const paramValue = paramsObj[key] const isAcceptVariable = reactFlowNodeData.inputParams.find((param) => param.name === key)?.acceptVariable ?? false @@ -523,7 +755,6 @@ function hasReceivedRequiredInputs(waitingNode: IWaitingNode): boolean { async function determineNodesToIgnore( currentNode: IReactFlowNode, result: any, - humanInput: IHumanInput | undefined, edges: IReactFlowEdge[], nodeId: string ): Promise { @@ -533,7 +764,7 @@ async function determineNodesToIgnore( const isDecisionNode = currentNode.data.name === 'conditionAgentflow' || currentNode.data.name === 'conditionAgentAgentflow' || - (currentNode.data.name === 'humanInputAgentflow' && humanInput) + currentNode.data.name === 'humanInputAgentflow' if (isDecisionNode && result.output?.conditions) { const outputConditions: ICondition[] = result.output.conditions @@ -571,7 +802,9 @@ async function processNodeOutputs({ edges, nodeExecutionQueue, waitingNodes, - loopCounts + loopCounts, + sseStreamer, + chatId }: IProcessNodeOutputsParams): Promise<{ humanInput?: IHumanInput }> { logger.debug(`\n๐Ÿ”„ Processing outputs from node: ${nodeId}`) @@ -584,7 +817,7 @@ async function processNodeOutputs({ if (!currentNode) return { humanInput: updatedHumanInput } // Get nodes to ignore based on conditions - const ignoreNodeIds = await determineNodesToIgnore(currentNode, result, humanInput, edges, nodeId) + const ignoreNodeIds = await determineNodesToIgnore(currentNode, result, edges, nodeId) if (ignoreNodeIds.length) { logger.debug(` โญ๏ธ Skipping nodes: [${ignoreNodeIds.join(', ')}]`) } @@ -652,6 +885,11 @@ async function processNodeOutputs({ } } else { logger.debug(` โš ๏ธ Maximum loop count (${maxLoop}) reached, stopping loop`) + const fallbackMessage = result.output.fallbackMessage || `Loop completed after reaching maximum iteration count of ${maxLoop}.` + if (sseStreamer) { + sseStreamer.streamTokenEvent(chatId, fallbackMessage) + } + result.output = { 
...result.output, content: fallbackMessage } } } @@ -770,9 +1008,12 @@ const executeNode = async ({ chatId, sessionId, apiMessageId, + evaluationRunId, parentExecutionId, pastChatHistory, + prependedChatHistory, appDataSource, + usageCacheManager, telemetry, componentNodes, cachePool, @@ -792,11 +1033,17 @@ const executeNode = async ({ analyticHandlers, isInternal, isRecursive, - iterationContext + iterationContext, + loopCounts, + orgId, + workspaceId, + subscriptionId, + productId }: IExecuteNodeParams): Promise<{ result: any shouldStop?: boolean agentFlowExecutedData?: IAgentflowExecutedData[] + humanInput?: IHumanInput }> => { try { if (abortController?.signal?.aborted) { @@ -824,7 +1071,7 @@ const executeNode = async ({ } // Get available variables and resolve them - const availableVariables = await appDataSource.getRepository(Variable).find() + const availableVariables = await appDataSource.getRepository(Variable).findBy(getWorkspaceSearchOptions(workspaceId)) // Prepare flow config let updatedState = cloneDeep(agentflowRuntime.state) @@ -832,6 +1079,7 @@ const executeNode = async ({ const chatHistory = [...pastChatHistory, ...runtimeChatHistory] const flowConfig: IFlowConfig = { chatflowid: chatflow.id, + chatflowId: chatflow.id, chatId, sessionId, apiMessageId, @@ -863,22 +1111,30 @@ const executeNode = async ({ variableOverrides, uploadedFilesContent, chatHistory, + componentNodes, agentFlowExecutedData, - iterationContext + iterationContext, + loopCounts ) // Handle human input if present let humanInputAction: Record | undefined + let updatedHumanInput = humanInput if (agentFlowExecutedData.length) { const lastNodeOutput = agentFlowExecutedData[agentFlowExecutedData.length - 1]?.data?.output as ICommonObject | undefined humanInputAction = lastNodeOutput?.humanInputAction } + // This is when human in the loop is resumed if (humanInput && nodeId === humanInput.startNodeId) { reactFlowNodeData.inputs = { ...reactFlowNodeData.inputs, humanInput } // Remove 
the stopped humanInput from execution data agentFlowExecutedData = agentFlowExecutedData.filter((execData) => execData.nodeId !== nodeId) + + // Clear humanInput after it's been consumed to prevent subsequent humanInputAgentflow nodes from proceeding + logger.debug(`๐Ÿงน Clearing humanInput after consumption by node: ${nodeId}`) + updatedHumanInput = undefined } // Check if this is the last node for streaming purpose @@ -902,13 +1158,18 @@ const executeNode = async ({ // Prepare run parameters const runParams = { + orgId, + workspaceId, + subscriptionId, chatId, sessionId, chatflowid: chatflow.id, + chatflowId: chatflow.id, apiMessageId: flowConfig.apiMessageId, logger, appDataSource, databaseEntities, + usageCacheManager, componentNodes, cachePool, analytic: chatflow.analytic, @@ -917,12 +1178,14 @@ const executeNode = async ({ isLastNode, sseStreamer, pastChatHistory, + prependedChatHistory, agentflowRuntime, abortController, analyticHandlers, parentTraceIds, humanInputAction, - iterationContext + iterationContext, + evaluationRunId } // Execute node @@ -972,7 +1235,8 @@ const executeNode = async ({ index: i, value: item, isFirst: i === 0, - isLast: i === results.input.iterationInput.length - 1 + isLast: i === results.input.iterationInput.length - 1, + sessionId: sessionId } try { @@ -982,7 +1246,9 @@ const executeNode = async ({ incomingInput, chatflow: iterationChatflow, chatId, + evaluationRunId, appDataSource, + usageCacheManager, telemetry, cachePool, sseStreamer, @@ -996,7 +1262,11 @@ const executeNode = async ({ iterationContext: { ...iterationContext, agentflowRuntime - } + }, + orgId, + workspaceId, + subscriptionId, + productId }) // Store the result @@ -1023,7 +1293,7 @@ const executeNode = async ({ if (parentExecutionId) { try { logger.debug(` ๐Ÿ“ Updating parent execution ${parentExecutionId} with iteration ${i + 1} data`) - await updateExecution(appDataSource, parentExecutionId, { + await updateExecution(appDataSource, parentExecutionId, 
workspaceId, { executionData: JSON.stringify(agentFlowExecutedData) }) } catch (error) { @@ -1112,7 +1382,7 @@ const executeNode = async ({ sseStreamer?.streamActionEvent(chatId, humanInputAction) - return { result: results, shouldStop: true, agentFlowExecutedData } + return { result: results, shouldStop: true, agentFlowExecutedData, humanInput: updatedHumanInput } } // Stop going through the current route if the node is a agent node waiting for human input before using the tool @@ -1159,10 +1429,10 @@ const executeNode = async ({ sseStreamer?.streamActionEvent(chatId, humanInputAction) - return { result: results, shouldStop: true, agentFlowExecutedData } + return { result: results, shouldStop: true, agentFlowExecutedData, humanInput: updatedHumanInput } } - return { result: results, agentFlowExecutedData } + return { result: results, agentFlowExecutedData, humanInput: updatedHumanInput } } catch (error) { logger.error(`[server]: Error executing node ${nodeId}: ${getErrorMessage(error)}`) throw error @@ -1184,6 +1454,20 @@ const checkForMultipleStartNodes = (startingNodeIds: string[], isRecursive: bool } } +const parseFormStringToJson = (formString: string): Record => { + const result: Record = {} + const lines = formString.split('\n') + + for (const line of lines) { + const [key, value] = line.split(': ').map((part) => part.trim()) + if (key && value) { + result[key] = value + } + } + + return result +} + /* * Function to traverse the flow graph and execute the nodes */ @@ -1192,8 +1476,10 @@ export const executeAgentFlow = async ({ incomingInput, chatflow, chatId, + evaluationRunId, appDataSource, telemetry, + usageCacheManager, cachePool, sseStreamer, baseURL, @@ -1204,7 +1490,11 @@ export const executeAgentFlow = async ({ isRecursive = false, parentExecutionId, iterationContext, - isTool = false + isTool = false, + orgId, + workspaceId, + subscriptionId, + productId }: IExecuteAgentFlowParams) => { logger.debug('\n๐Ÿš€ Starting flow execution') @@ -1214,8 
+1504,19 @@ export const executeAgentFlow = async ({ const uploads = incomingInput.uploads const userMessageDateTime = new Date() const chatflowid = chatflow.id - const sessionId = incomingInput.sessionId ?? chatId + const sessionId = iterationContext?.sessionId || overrideConfig.sessionId || chatId const humanInput: IHumanInput | undefined = incomingInput.humanInput + + // Validate history schema if provided + if (incomingInput.history && incomingInput.history.length > 0) { + if (!validateHistorySchema(incomingInput.history)) { + throw new Error( + 'Invalid history format. Each history item must have: ' + '{ role: "apiMessage" | "userMessage", content: string }' + ) + } + } + + const prependedChatHistory = incomingInput.history ?? [] const apiMessageId = uuidv4() /*** Get chatflows and prepare data ***/ @@ -1281,7 +1582,8 @@ export const executeAgentFlow = async ({ const previousExecutions = await appDataSource.getRepository(Execution).find({ where: { sessionId, - agentflowId: chatflowid + agentflowId: chatflowid, + workspaceId }, order: { createdDate: 'DESC' @@ -1293,6 +1595,47 @@ export const executeAgentFlow = async ({ } } + // If the state is persistent, get the state from the previous execution + const startPersistState = nodes.find((node) => node.data.name === 'startAgentflow')?.data.inputs?.startPersistState + if (startPersistState === true && previousExecution) { + const previousExecutionData = (JSON.parse(previousExecution.executionData) as IAgentflowExecutedData[]) ?? 
[] + + let previousState = {} + if (Array.isArray(previousExecutionData) && previousExecutionData.length) { + for (const execData of previousExecutionData.reverse()) { + if (execData.data.state) { + previousState = execData.data.state + break + } + } + } + + // Check if startState has been overridden from overrideConfig.startState and is enabled + const startAgentflowNode = nodes.find((node) => node.data.name === 'startAgentflow') + const isStartStateEnabled = + nodeOverrides && startAgentflowNode + ? nodeOverrides[startAgentflowNode.data.label]?.find((param: any) => param.name === 'startState')?.enabled ?? false + : false + + if (isStartStateEnabled && overrideConfig?.startState) { + if (Array.isArray(overrideConfig.startState)) { + // Handle array format: [{"key": "foo", "value": "foo4"}] + const overrideStateObj: ICommonObject = {} + for (const item of overrideConfig.startState) { + if (item.key && item.value !== undefined) { + overrideStateObj[item.key] = item.value + } + } + previousState = { ...previousState, ...overrideStateObj } + } else if (typeof overrideConfig.startState === 'object') { + // Object override: "startState": {...} + previousState = { ...previousState, ...overrideConfig.startState } + } + } + + agentflowRuntime.state = previousState + } + // If the start input type is form input, get the form values from the previous execution (form values are persisted in the same session) if (startInputType === 'formInput' && previousExecution) { const previousExecutionData = (JSON.parse(previousExecution.executionData) as IAgentflowExecutedData[]) ?? 
[] @@ -1302,41 +1645,102 @@ export const executeAgentFlow = async ({ if (previousStartAgent) { const previousStartAgentOutput = previousStartAgent.data.output if (previousStartAgentOutput && typeof previousStartAgentOutput === 'object' && 'form' in previousStartAgentOutput) { - agentflowRuntime.form = previousStartAgentOutput.form + const formValues = previousStartAgentOutput.form + if (typeof formValues === 'string') { + agentflowRuntime.form = parseFormStringToJson(formValues) + } else { + agentflowRuntime.form = formValues + } } } } // If it is human input, find the last checkpoint and resume - if (humanInput?.startNodeId) { + // Skip human input resumption for recursive iteration calls - they should start fresh + if (humanInput && !(isRecursive && iterationContext)) { if (!previousExecution) { throw new Error(`No previous execution found for session ${sessionId}`) } - if (previousExecution.state !== 'STOPPED') { + let executionData = JSON.parse(previousExecution.executionData) as IAgentflowExecutedData[] + let shouldUpdateExecution = false + + // Handle different execution states + if (previousExecution.state === 'STOPPED') { + // Normal case - execution is stopped and ready to resume + logger.debug(` โœ… Previous execution is in STOPPED state, ready to resume`) + } else if (previousExecution.state === 'ERROR') { + // Check if second-to-last execution item is STOPPED and last is ERROR + if (executionData.length >= 2) { + const lastItem = executionData[executionData.length - 1] + const secondLastItem = executionData[executionData.length - 2] + + if (lastItem.status === 'ERROR' && secondLastItem.status === 'STOPPED') { + logger.debug(` ๐Ÿ”„ Found ERROR after STOPPED - removing last error item to allow retry`) + logger.debug(` Removing: ${lastItem.nodeId} (${lastItem.nodeLabel}) - ${lastItem.data?.error || 'Unknown error'}`) + + // Remove the last ERROR item + executionData = executionData.slice(0, -1) + shouldUpdateExecution = true + } else { + throw new Error( + 
`Cannot resume execution ${previousExecution.id} because it is in 'ERROR' state ` + + `and the previous item is not in 'STOPPED' state. Only executions that ended with a ` + + `STOPPED state (or ERROR after STOPPED) can be resumed.` + ) + } + } else { + throw new Error( + `Cannot resume execution ${previousExecution.id} because it is in 'ERROR' state ` + + `with insufficient execution data. Only executions in 'STOPPED' state can be resumed.` + ) + } + } else { throw new Error( `Cannot resume execution ${previousExecution.id} because it is in '${previousExecution.state}' state. ` + - `Only executions in 'STOPPED' state can be resumed.` + `Only executions in 'STOPPED' state (or 'ERROR' after 'STOPPED') can be resumed.` ) } - startingNodeIds.push(humanInput.startNodeId) - checkForMultipleStartNodes(startingNodeIds, isRecursive, nodes) + let startNodeId = humanInput.startNodeId - const executionData = JSON.parse(previousExecution.executionData) as IAgentflowExecutedData[] + // If startNodeId is not provided, find the last node with STOPPED status from execution data + if (!startNodeId) { + // Search in reverse order to find the last (most recent) STOPPED node + const stoppedNode = [...executionData].reverse().find((data) => data.status === 'STOPPED') - // Verify that the humanInputAgentflow node exists in previous execution - const humanInputNodeExists = executionData.some((data) => data.nodeId === humanInput.startNodeId) + if (!stoppedNode) { + throw new Error('No stopped node found in previous execution data to resume from') + } - if (!humanInputNodeExists) { + startNodeId = stoppedNode.nodeId + logger.debug(` ๐Ÿ” Auto-detected stopped node to resume from: ${startNodeId} (${stoppedNode.nodeLabel})`) + } + + // Verify that the node exists in previous execution + const nodeExists = executionData.some((data) => data.nodeId === startNodeId) + + if (!nodeExists) { throw new Error( - `Human Input node ${humanInput.startNodeId} not found in previous execution. 
` + + `Node ${startNodeId} not found in previous execution. ` + `This could indicate an invalid resume attempt or a modified flow.` ) } + startingNodeIds.push(startNodeId) + checkForMultipleStartNodes(startingNodeIds, isRecursive, nodes) + agentFlowExecutedData.push(...executionData) + // Update execution data if we removed an error item + if (shouldUpdateExecution) { + logger.debug(` ๐Ÿ“ Updating execution data after removing error item`) + await updateExecution(appDataSource, previousExecution.id, workspaceId, { + executionData: JSON.stringify(executionData), + state: 'INPROGRESS' + }) + } + // Get last state const lastState = executionData[executionData.length - 1].data.state @@ -1344,11 +1748,14 @@ export const executeAgentFlow = async ({ agentflowRuntime.state = (lastState as ICommonObject) ?? {} // Update execution state to INPROGRESS - await updateExecution(appDataSource, previousExecution.id, { + await updateExecution(appDataSource, previousExecution.id, workspaceId, { state: 'INPROGRESS' }) newExecution = previousExecution parentExecutionId = previousExecution.id + + // Update humanInput with the resolved startNodeId + humanInput.startNodeId = startNodeId } else if (isRecursive && parentExecutionId) { const { startingNodeIds: startingNodeIdsFromFlow } = getStartingNode(nodeDependencies) startingNodeIds.push(...startingNodeIdsFromFlow) @@ -1357,15 +1764,15 @@ export const executeAgentFlow = async ({ // For recursive calls with a valid parent execution ID, don't create a new execution // Instead, fetch the parent execution to use it const parentExecution = await appDataSource.getRepository(Execution).findOne({ - where: { id: parentExecutionId } + where: { id: parentExecutionId, workspaceId } }) if (parentExecution) { - logger.debug(` ๐Ÿ“ Using parent execution ID: ${parentExecutionId} for recursive call`) + logger.debug(` ๐Ÿ“ Using parent execution ID: ${parentExecutionId} for recursive call (iteration: ${!!iterationContext})`) newExecution = 
parentExecution } else { console.warn(` โš ๏ธ Parent execution ID ${parentExecutionId} not found, will create new execution`) - newExecution = await addExecution(appDataSource, chatflowid, agentFlowExecutedData, sessionId) + newExecution = await addExecution(appDataSource, chatflowid, agentFlowExecutedData, sessionId, workspaceId) parentExecutionId = newExecution.id } } else { @@ -1374,7 +1781,7 @@ export const executeAgentFlow = async ({ checkForMultipleStartNodes(startingNodeIds, isRecursive, nodes) // Only create a new execution if this is not a recursive call - newExecution = await addExecution(appDataSource, chatflowid, agentFlowExecutedData, sessionId) + newExecution = await addExecution(appDataSource, chatflowid, agentFlowExecutedData, sessionId, workspaceId) parentExecutionId = newExecution.id } @@ -1395,7 +1802,7 @@ export const executeAgentFlow = async ({ .find({ where: { chatflowid, - chatId + sessionId }, order: { createdDate: 'ASC' @@ -1408,12 +1815,44 @@ export const executeAgentFlow = async ({ role: message.role === 'userMessage' ? 'user' : 'assistant' } - // Only add additional_kwargs when fileUploads or artifacts exists and is not empty - if ((message.fileUploads && message.fileUploads !== '') || (message.artifacts && message.artifacts !== '')) { + const hasFileUploads = message.fileUploads && message.fileUploads !== '' + const hasArtifacts = message.artifacts && message.artifacts !== '' + const hasFileAnnotations = message.fileAnnotations && message.fileAnnotations !== '' + const hasUsedTools = message.usedTools && message.usedTools !== '' + + if (hasFileUploads || hasArtifacts || hasFileAnnotations || hasUsedTools) { mappedMessage.additional_kwargs = {} - if (message.fileUploads && message.fileUploads !== '') { - mappedMessage.additional_kwargs.fileUploads = message.fileUploads + if (hasFileUploads) { + try { + mappedMessage.additional_kwargs.fileUploads = JSON.parse(message.fileUploads!) 
+ } catch { + mappedMessage.additional_kwargs.fileUploads = message.fileUploads + } + } + + if (hasArtifacts) { + try { + mappedMessage.additional_kwargs.artifacts = JSON.parse(message.artifacts!) + } catch { + mappedMessage.additional_kwargs.artifacts = message.artifacts + } + } + + if (hasFileAnnotations) { + try { + mappedMessage.additional_kwargs.fileAnnotations = JSON.parse(message.fileAnnotations!) + } catch { + mappedMessage.additional_kwargs.fileAnnotations = message.fileAnnotations + } + } + + if (hasUsedTools) { + try { + mappedMessage.additional_kwargs.usedTools = JSON.parse(message.usedTools!) + } catch { + mappedMessage.additional_kwargs.usedTools = message.usedTools + } } } @@ -1424,12 +1863,26 @@ export const executeAgentFlow = async ({ let iterations = 0 let currentHumanInput = humanInput + // For iteration calls, clear human input since they should start fresh + if (isRecursive && iterationContext && humanInput) { + currentHumanInput = undefined + } + let analyticHandlers: AnalyticHandler | undefined let parentTraceIds: ICommonObject | undefined try { if (chatflow.analytic) { - analyticHandlers = AnalyticHandler.getInstance({ inputs: {} } as any, { + // Override config analytics + let analyticInputs: ICommonObject = {} + if (overrideConfig?.analytics && Object.keys(overrideConfig.analytics).length > 0) { + analyticInputs = { + ...overrideConfig.analytics + } + } + analyticHandlers = AnalyticHandler.getInstance({ inputs: { analytics: analyticInputs } } as any, { + orgId, + workspaceId, appDataSource, databaseEntities, componentNodes, @@ -1486,10 +1939,13 @@ export const executeAgentFlow = async ({ chatId, sessionId, apiMessageId, + evaluationRunId, parentExecutionId, isInternal, pastChatHistory, + prependedChatHistory, appDataSource, + usageCacheManager, telemetry, componentNodes, cachePool, @@ -1508,13 +1964,23 @@ export const executeAgentFlow = async ({ parentTraceIds, analyticHandlers, isRecursive, - iterationContext + iterationContext, + 
loopCounts, + orgId, + workspaceId, + subscriptionId, + productId }) if (executionResult.agentFlowExecutedData) { agentFlowExecutedData = executionResult.agentFlowExecutedData } + // Update humanInput if it was cleared by the executed node + if (executionResult.humanInput !== currentHumanInput) { + currentHumanInput = executionResult.humanInput + } + if (executionResult.shouldStop) { status = 'STOPPED' break @@ -1568,7 +2034,8 @@ export const executeAgentFlow = async ({ nodeExecutionQueue, waitingNodes, loopCounts, - abortController + sseStreamer, + chatId }) // Update humanInput if it was changed @@ -1607,7 +2074,7 @@ export const executeAgentFlow = async ({ if (!isRecursive) { sseStreamer?.streamAgentFlowExecutedDataEvent(chatId, agentFlowExecutedData) - await updateExecution(appDataSource, newExecution.id, { + await updateExecution(appDataSource, newExecution.id, workspaceId, { executionData: JSON.stringify(agentFlowExecutedData), state: errorStatus }) @@ -1642,7 +2109,7 @@ export const executeAgentFlow = async ({ // Only update execution record if this is not a recursive call if (!isRecursive) { - await updateExecution(appDataSource, newExecution.id, { + await updateExecution(appDataSource, newExecution.id, workspaceId, { executionData: JSON.stringify(agentFlowExecutedData), state: status }) @@ -1717,7 +2184,7 @@ export const executeAgentFlow = async ({ role: 'userMessage', content: finalUserInput, chatflowid, - chatType: isInternal ? ChatType.INTERNAL : ChatType.EXTERNAL, + chatType: evaluationRunId ? ChatType.EVALUATION : isInternal ? ChatType.INTERNAL : ChatType.EXTERNAL, chatId, sessionId, createdDate: userMessageDateTime, @@ -1732,7 +2199,7 @@ export const executeAgentFlow = async ({ role: 'apiMessage', content: content, chatflowid, - chatType: isInternal ? ChatType.INTERNAL : ChatType.EXTERNAL, + chatType: evaluationRunId ? ChatType.EVALUATION : isInternal ? 
ChatType.INTERNAL : ChatType.EXTERNAL, chatId, sessionId, executionId: newExecution.id @@ -1744,6 +2211,8 @@ export const executeAgentFlow = async ({ if (chatflow.followUpPrompts) { const followUpPromptsConfig = JSON.parse(chatflow.followUpPrompts) const followUpPrompts = await generateFollowUpPrompts(followUpPromptsConfig, apiMessage.content, { + orgId, + workspaceId, chatId, chatflowid, appDataSource, @@ -1760,13 +2229,19 @@ export const executeAgentFlow = async ({ logger.debug(`[server]: Finished running agentflow ${chatflowid}`) - await telemetry.sendTelemetry('prediction_sent', { - version: await getAppVersion(), - chatflowId: chatflowid, - chatId, - type: isInternal ? ChatType.INTERNAL : ChatType.EXTERNAL, - flowGraph: getTelemetryFlowObj(nodes, edges) - }) + await telemetry.sendTelemetry( + 'prediction_sent', + { + version: await getAppVersion(), + chatflowId: chatflowid, + chatId, + type: evaluationRunId ? ChatType.EVALUATION : isInternal ? ChatType.INTERNAL : ChatType.EXTERNAL, + flowGraph: getTelemetryFlowObj(nodes, edges), + productId, + subscriptionId + }, + orgId + ) /*** Prepare response ***/ let result: ICommonObject = {} @@ -1781,5 +2256,27 @@ export const executeAgentFlow = async ({ if (sessionId) result.sessionId = sessionId + if (shouldAutoPlayTTS(chatflow.textToSpeech) && result.text) { + const options = { + orgId, + chatflowid, + chatId, + appDataSource, + databaseEntities + } + + if (sseStreamer) { + await generateTTSForResponseStream( + result.text, + chatflow.textToSpeech, + options, + chatId, + chatMessage?.id, + sseStreamer, + abortController + ) + } + } + return result } diff --git a/packages/server/src/utils/buildChatflow.ts b/packages/server/src/utils/buildChatflow.ts index d0b5e0fbe..c191876aa 100644 --- a/packages/server/src/utils/buildChatflow.ts +++ b/packages/server/src/utils/buildChatflow.ts @@ -6,6 +6,7 @@ import { omit } from 'lodash' import { IFileUpload, convertSpeechToText, + convertTextToSpeechStream, ICommonObject, 
addSingleFileToStorage, generateFollowUpPrompts, @@ -15,15 +16,17 @@ import { mapExtToInputField, getFileFromUpload, removeSpecificFileFromUpload, - handleEscapeCharacters + EvaluationRunner, + handleEscapeCharacters, + IServerSideEventStreamer } from 'flowise-components' import { StatusCodes } from 'http-status-codes' import { IncomingInput, IMessage, INodeData, - IReactFlowObject, IReactFlowNode, + IReactFlowObject, IDepthQueue, ChatType, IChatMessage, @@ -56,18 +59,87 @@ import { constructGraphs, getAPIOverrideConfig } from '../utils' -import { validateChatflowAPIKey } from './validateKey' +import { validateFlowAPIKey } from './validateKey' import logger from './logger' import { utilAddChatMessage } from './addChatMesage' +import { checkPredictions, checkStorage, updatePredictionsUsage, updateStorageUsage } from './quotaUsage' import { buildAgentGraph } from './buildAgentGraph' import { getErrorMessage } from '../errors/utils' import { FLOWISE_METRIC_COUNTERS, FLOWISE_COUNTER_STATUS, IMetricsProvider } from '../Interface.Metrics' +import { getWorkspaceSearchOptions } from '../enterprise/utils/ControllerServiceUtils' import { OMIT_QUEUE_JOB_DATA } from './constants' import { executeAgentFlow } from './buildAgentflow' +import { Workspace } from '../enterprise/database/entities/workspace.entity' +import { Organization } from '../enterprise/database/entities/organization.entity' + +const shouldAutoPlayTTS = (textToSpeechConfig: string | undefined | null): boolean => { + if (!textToSpeechConfig) return false + try { + const config = typeof textToSpeechConfig === 'string' ? 
JSON.parse(textToSpeechConfig) : textToSpeechConfig + for (const providerKey in config) { + const provider = config[providerKey] + if (provider && provider.status === true && provider.autoPlay === true) { + return true + } + } + return false + } catch (error) { + logger.error(`Error parsing textToSpeechConfig: ${getErrorMessage(error)}`) + return false + } +} + +const generateTTSForResponseStream = async ( + responseText: string, + textToSpeechConfig: string | undefined, + options: ICommonObject, + chatId: string, + chatMessageId: string, + sseStreamer: IServerSideEventStreamer, + abortController?: AbortController +): Promise => { + try { + if (!textToSpeechConfig) return + const config = typeof textToSpeechConfig === 'string' ? JSON.parse(textToSpeechConfig) : textToSpeechConfig + + let activeProviderConfig = null + for (const providerKey in config) { + const provider = config[providerKey] + if (provider && provider.status === true) { + activeProviderConfig = { + name: providerKey, + credentialId: provider.credentialId, + voice: provider.voice, + model: provider.model + } + break + } + } + + if (!activeProviderConfig) return + + await convertTextToSpeechStream( + responseText, + activeProviderConfig, + options, + abortController || new AbortController(), + (format: string) => { + sseStreamer.streamTTSStartEvent(chatId, chatMessageId, format) + }, + (chunk: Buffer) => { + const audioBase64 = chunk.toString('base64') + sseStreamer.streamTTSDataEvent(chatId, chatMessageId, audioBase64) + }, + () => { + sseStreamer.streamTTSEndEvent(chatId, chatMessageId) + } + ) + } catch (error) { + logger.error(`[server]: TTS streaming failed: ${getErrorMessage(error)}`) + sseStreamer.streamTTSEndEvent(chatId, chatMessageId) + } +} -/* - * Initialize the ending node to be executed - */ const initEndingNode = async ({ endingNodeIds, componentNodes, @@ -230,15 +302,22 @@ export const executeFlow = async ({ incomingInput, chatflow, chatId, + isEvaluation, + evaluationRunId, 
appDataSource, telemetry, cachePool, + usageCacheManager, sseStreamer, baseURL, isInternal, files, signal, - isTool + isTool, + orgId, + workspaceId, + subscriptionId, + productId }: IExecuteFlowParams) => { // Ensure incomingInput has all required properties with default values incomingInput = { @@ -265,6 +344,8 @@ export const executeFlow = async ({ if (uploads) { fileUploads = uploads for (let i = 0; i < fileUploads.length; i += 1) { + await checkStorage(orgId, subscriptionId, usageCacheManager) + const upload = fileUploads[i] // if upload in an image, a rag file, or audio @@ -273,7 +354,8 @@ export const executeFlow = async ({ const splitDataURI = upload.data.split(',') const bf = Buffer.from(splitDataURI.pop() || '', 'base64') const mime = splitDataURI[0].split(':')[1].split(';')[0] - await addSingleFileToStorage(mime, bf, filename, chatflowid, chatId) + const { totalSize } = await addSingleFileToStorage(mime, bf, filename, orgId, chatflowid, chatId) + await updateStorageUsage(orgId, workspaceId, totalSize, usageCacheManager) upload.type = 'stored-file' // Omit upload.data since we don't store the content in database fileUploads[i] = omit(upload, ['data']) @@ -287,7 +369,7 @@ export const executeFlow = async ({ // Run Speech to Text conversion if (upload.mime === 'audio/webm' || upload.mime === 'audio/mp4' || upload.mime === 'audio/ogg') { - logger.debug(`Attempting a speech to text conversion...`) + logger.debug(`[server]: [${orgId}]: Attempting a speech to text conversion...`) let speechToTextConfig: ICommonObject = {} if (chatflow.speechToText) { const speechToTextProviders = JSON.parse(chatflow.speechToText) @@ -302,13 +384,14 @@ export const executeFlow = async ({ } if (speechToTextConfig) { const options: ICommonObject = { + orgId, chatId, chatflowid, appDataSource, databaseEntities: databaseEntities } const speechToTextResult = await convertSpeechToText(upload, speechToTextConfig, options) - logger.debug(`Speech to text result: ${speechToTextResult}`) + 
logger.debug(`[server]: [${orgId}]: Speech to text result: ${speechToTextResult}`) if (speechToTextResult) { incomingInput.question = speechToTextResult question = speechToTextResult @@ -329,11 +412,21 @@ export const executeFlow = async ({ if (files?.length) { overrideConfig = { ...incomingInput } for (const file of files) { + await checkStorage(orgId, subscriptionId, usageCacheManager) + const fileNames: string[] = [] const fileBuffer = await getFileFromUpload(file.path ?? file.key) // Address file name with special characters: https://github.com/expressjs/multer/issues/1104 file.originalname = Buffer.from(file.originalname, 'latin1').toString('utf8') - const storagePath = await addArrayFilesToStorage(file.mimetype, fileBuffer, file.originalname, fileNames, chatflowid) + const { path: storagePath, totalSize } = await addArrayFilesToStorage( + file.mimetype, + fileBuffer, + file.originalname, + fileNames, + orgId, + chatflowid + ) + await updateStorageUsage(orgId, workspaceId, totalSize, usageCacheManager) const fileInputFieldFromMimeType = mapMimeTypeToInputField(file.mimetype) @@ -382,16 +475,22 @@ export const executeFlow = async ({ incomingInput, chatflow, chatId, + evaluationRunId, appDataSource, telemetry, cachePool, + usageCacheManager, sseStreamer, baseURL, isInternal, uploadedFilesContent, fileUploads, signal, - isTool + isTool, + orgId, + workspaceId, + subscriptionId, + productId }) } @@ -443,11 +542,12 @@ export const executeFlow = async ({ }) /*** Get API Config ***/ - const availableVariables = await appDataSource.getRepository(Variable).find() + const availableVariables = await appDataSource.getRepository(Variable).findBy(getWorkspaceSearchOptions(workspaceId)) const { nodeOverrides, variableOverrides, apiOverrideStatus } = getAPIOverrideConfig(chatflow) const flowConfig: IFlowConfig = { chatflowid, + chatflowId: chatflow.id, chatId, sessionId, chatHistory, @@ -455,7 +555,7 @@ export const executeFlow = async ({ ...incomingInput.overrideConfig } - 
logger.debug(`[server]: Start building flow ${chatflowid}`) + logger.debug(`[server]: [${orgId}]: Start building flow ${chatflowid}`) /*** BFS to traverse from Starting Nodes to Ending Node ***/ const reactFlowNodes = await buildFlow({ @@ -479,9 +579,15 @@ export const executeFlow = async ({ availableVariables, variableOverrides, cachePool, + usageCacheManager, isUpsert: false, uploads, - baseURL + baseURL, + orgId, + workspaceId, + subscriptionId, + updateStorageUsage, + checkStorage }) const setVariableNodesOutput = getSetVariableNodesOutput(reactFlowNodes) @@ -506,7 +612,9 @@ export const executeFlow = async ({ shouldStreamResponse: true, // agentflow is always streamed cachePool, baseURL, - signal + signal, + orgId, + workspaceId }) if (streamResults) { @@ -515,7 +623,7 @@ export const executeFlow = async ({ role: 'userMessage', content: incomingInput.question, chatflowid: agentflow.id, - chatType: isInternal ? ChatType.INTERNAL : ChatType.EXTERNAL, + chatType: isEvaluation ? ChatType.EVALUATION : isInternal ? ChatType.INTERNAL : ChatType.EXTERNAL, chatId, memoryType, sessionId, @@ -530,7 +638,7 @@ export const executeFlow = async ({ role: 'apiMessage', content: finalResult, chatflowid: agentflow.id, - chatType: isInternal ? ChatType.INTERNAL : ChatType.EXTERNAL, + chatType: isEvaluation ? ChatType.EVALUATION : isInternal ? ChatType.INTERNAL : ChatType.EXTERNAL, chatId, memoryType, sessionId @@ -556,13 +664,17 @@ export const executeFlow = async ({ } const chatMessage = await utilAddChatMessage(apiMessage, appDataSource) - await telemetry.sendTelemetry('agentflow_prediction_sent', { - version: await getAppVersion(), - agentflowId: agentflow.id, - chatId, - type: isInternal ? ChatType.INTERNAL : ChatType.EXTERNAL, - flowGraph: getTelemetryFlowObj(nodes, edges) - }) + await telemetry.sendTelemetry( + 'agentflow_prediction_sent', + { + version: await getAppVersion(), + agentflowId: agentflow.id, + chatId, + type: isEvaluation ? ChatType.EVALUATION : isInternal ? 
ChatType.INTERNAL : ChatType.EXTERNAL, + flowGraph: getTelemetryFlowObj(nodes, edges) + }, + orgId + ) // Find the previous chat message with the same action id and remove the action if (incomingInput.action && Object.keys(incomingInput.action).length) { @@ -596,6 +708,7 @@ export const executeFlow = async ({ // Prepare response let result: ICommonObject = {} result.text = finalResult + result.question = incomingInput.question result.chatId = chatId result.chatMessageId = chatMessage?.id @@ -605,7 +718,6 @@ export const executeFlow = async ({ if (finalAction && Object.keys(finalAction).length) result.action = finalAction if (Object.keys(setVariableNodesOutput).length) result.flowVariables = setVariableNodesOutput result.followUpPrompts = JSON.stringify(apiMessage.followUpPrompts) - return result } return undefined @@ -643,16 +755,23 @@ export const executeFlow = async ({ /*** Prepare run params ***/ const runParams = { + orgId, + workspaceId, + subscriptionId, chatId, chatflowid, apiMessageId, logger, appDataSource, databaseEntities, + usageCacheManager, analytic: chatflow.analytic, uploads, prependMessages, - ...(isStreamValid && { sseStreamer, shouldStreamResponse: isStreamValid }) + ...(isStreamValid && { sseStreamer, shouldStreamResponse: isStreamValid }), + evaluationRunId, + updateStorageUsage, + checkStorage } /*** Run the ending node ***/ @@ -669,7 +788,7 @@ export const executeFlow = async ({ role: 'userMessage', content: question, chatflowid, - chatType: isInternal ? ChatType.INTERNAL : ChatType.EXTERNAL, + chatType: isEvaluation ? ChatType.EVALUATION : isInternal ? 
ChatType.INTERNAL : ChatType.EXTERNAL, chatId, memoryType, sessionId, @@ -701,6 +820,8 @@ export const executeFlow = async ({ rawOutput: resultText, appDataSource, databaseEntities, + workspaceId, + orgId, logger } const customFuncNodeInstance = new nodeModule.nodeClass() @@ -725,7 +846,7 @@ export const executeFlow = async ({ role: 'apiMessage', content: resultText, chatflowid, - chatType: isInternal ? ChatType.INTERNAL : ChatType.EXTERNAL, + chatType: isEvaluation ? ChatType.EVALUATION : isInternal ? ChatType.INTERNAL : ChatType.EXTERNAL, chatId, memoryType, sessionId @@ -749,15 +870,24 @@ export const executeFlow = async ({ const chatMessage = await utilAddChatMessage(apiMessage, appDataSource) - logger.debug(`[server]: Finished running ${endingNodeData.label} (${endingNodeData.id})`) - - await telemetry.sendTelemetry('prediction_sent', { - version: await getAppVersion(), - chatflowId: chatflowid, - chatId, - type: isInternal ? ChatType.INTERNAL : ChatType.EXTERNAL, - flowGraph: getTelemetryFlowObj(nodes, edges) - }) + logger.debug(`[server]: [${orgId}]: Finished running ${endingNodeData.label} (${endingNodeData.id})`) + if (evaluationRunId) { + const metrics = await EvaluationRunner.getAndDeleteMetrics(evaluationRunId) + result.metrics = metrics + } + await telemetry.sendTelemetry( + 'prediction_sent', + { + version: await getAppVersion(), + chatflowId: chatflowid, + chatId, + type: isEvaluation ? ChatType.EVALUATION : isInternal ? 
ChatType.INTERNAL : ChatType.EXTERNAL, + flowGraph: getTelemetryFlowObj(nodes, edges), + productId, + subscriptionId + }, + orgId + ) /*** Prepare response ***/ result.question = incomingInput.question // return the question in the response, this is used when input text is empty but question is in audio format @@ -770,6 +900,17 @@ export const executeFlow = async ({ if (memoryType) result.memoryType = memoryType if (Object.keys(setVariableNodesOutput).length) result.flowVariables = setVariableNodesOutput + if (shouldAutoPlayTTS(chatflow.textToSpeech) && result.text) { + const options = { + orgId, + chatflowid, + chatId, + appDataSource, + databaseEntities + } + await generateTTSForResponseStream(result.text, chatflow.textToSpeech, options, chatId, chatMessage?.id, sseStreamer, signal) + } + return result } } @@ -824,12 +965,13 @@ const checkIfStreamValid = async ( } /** - * Build/Data Preperation for execute function + * Build/Data Preparation for execute function * @param {Request} req * @param {boolean} isInternal */ export const utilBuildChatflow = async (req: Request, isInternal: boolean = false): Promise => { const appServer = getRunningExpressApp() + const chatflowid = req.params.id // Check if chatflow exists @@ -841,7 +983,6 @@ export const utilBuildChatflow = async (req: Request, isInternal: boolean = fals } const isAgentFlow = chatflow.type === 'MULTIAGENT' - const httpProtocol = req.get('x-forwarded-proto') || req.protocol const baseURL = `${httpProtocol}://${req.get('host')}` const incomingInput: IncomingInput = req.body || {} // Ensure incomingInput is never undefined @@ -849,16 +990,56 @@ export const utilBuildChatflow = async (req: Request, isInternal: boolean = fals const files = (req.files as Express.Multer.File[]) || [] const abortControllerId = `${chatflow.id}_${chatId}` const isTool = req.get('flowise-tool') === 'true' + const isEvaluation: boolean = req.headers['X-Flowise-Evaluation'] || req.body.evaluation + let evaluationRunId = '' + 
evaluationRunId = req.body.evaluationRunId + if (isEvaluation && chatflow.type !== 'AGENTFLOW' && req.body.evaluationRunId) { + // this is needed for the collection of token metrics for non-agent flows, + // for agentflows the execution trace has the info needed + const newEval = { + evaluation: { + status: true, + evaluationRunId + } + } + chatflow.analytic = JSON.stringify(newEval) + } + + let organizationId = '' try { // Validate API Key if its external API request if (!isInternal) { - const isKeyValidated = await validateChatflowAPIKey(req, chatflow) + const isKeyValidated = await validateFlowAPIKey(req, chatflow) if (!isKeyValidated) { throw new InternalFlowiseError(StatusCodes.UNAUTHORIZED, `Unauthorized`) } } + // This can be public API, so we can only get orgId from the chatflow + const chatflowWorkspaceId = chatflow.workspaceId + const workspace = await appServer.AppDataSource.getRepository(Workspace).findOneBy({ + id: chatflowWorkspaceId + }) + if (!workspace) { + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Workspace ${chatflowWorkspaceId} not found`) + } + const workspaceId = workspace.id + + const org = await appServer.AppDataSource.getRepository(Organization).findOneBy({ + id: workspace.organizationId + }) + if (!org) { + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Organization ${workspace.organizationId} not found`) + } + + const orgId = org.id + organizationId = orgId + const subscriptionId = org.subscriptionId as string + const productId = await appServer.identityManager.getProductIdFromSubscription(subscriptionId) + + await checkPredictions(orgId, subscriptionId, appServer.usageCacheManager) + const executeData: IExecuteFlowParams = { incomingInput, // Use the defensively created incomingInput variable chatflow, @@ -866,18 +1047,25 @@ export const utilBuildChatflow = async (req: Request, isInternal: boolean = fals baseURL, isInternal, files, + isEvaluation, + evaluationRunId, appDataSource: appServer.AppDataSource, 
sseStreamer: appServer.sseStreamer, telemetry: appServer.telemetry, cachePool: appServer.cachePool, componentNodes: appServer.nodesPool.componentNodes, - isTool // used to disable streaming if incoming request its from ChatflowTool + isTool, // used to disable streaming if incoming request its from ChatflowTool + usageCacheManager: appServer.usageCacheManager, + orgId, + workspaceId, + subscriptionId, + productId } if (process.env.MODE === MODE.QUEUE) { const predictionQueue = appServer.queueManager.getQueue('prediction') const job = await predictionQueue.addJob(omit(executeData, OMIT_QUEUE_JOB_DATA)) - logger.debug(`[server]: Job added to queue: ${job.id}`) + logger.debug(`[server]: [${orgId}/${chatflow.id}/${chatId}]: Job added to queue: ${job.id}`) const queueEvents = predictionQueue.getQueueEvents() const result = await job.waitUntilFinished(queueEvents) @@ -885,7 +1073,7 @@ export const utilBuildChatflow = async (req: Request, isInternal: boolean = fals if (!result) { throw new Error('Job execution failed') } - + await updatePredictionsUsage(orgId, subscriptionId, workspaceId, appServer.usageCacheManager) incrementSuccessMetricCounter(appServer.metricsProvider, isInternal, isAgentFlow) return result } else { @@ -893,14 +1081,16 @@ export const utilBuildChatflow = async (req: Request, isInternal: boolean = fals const signal = new AbortController() appServer.abortControllerPool.add(abortControllerId, signal) executeData.signal = signal + const result = await executeFlow(executeData) appServer.abortControllerPool.remove(abortControllerId) + await updatePredictionsUsage(orgId, subscriptionId, workspaceId, appServer.usageCacheManager) incrementSuccessMetricCounter(appServer.metricsProvider, isInternal, isAgentFlow) return result } } catch (e) { - logger.error('[server]: Error:', e) + logger.error(`[server]:${organizationId}/${chatflow.id}/${chatId} Error:`, e) appServer.abortControllerPool.remove(`${chatflow.id}_${chatId}`) 
incrementFailedMetricCounter(appServer.metricsProvider, isInternal, isAgentFlow) if (e instanceof InternalFlowiseError && e.statusCode === StatusCodes.UNAUTHORIZED) { @@ -950,3 +1140,5 @@ const incrementFailedMetricCounter = (metricsProvider: IMetricsProvider, isInter ) } } + +export { shouldAutoPlayTTS, generateTTSForResponseStream } diff --git a/packages/server/src/utils/constants.ts b/packages/server/src/utils/constants.ts index 247446057..0d9caf949 100644 --- a/packages/server/src/utils/constants.ts +++ b/packages/server/src/utils/constants.ts @@ -1,3 +1,8 @@ +import Auth0SSO from '../enterprise/sso/Auth0SSO' +import AzureSSO from '../enterprise/sso/AzureSSO' +import GithubSSO from '../enterprise/sso/GithubSSO' +import GoogleSSO from '../enterprise/sso/GoogleSSO' + export const WHITELIST_URLS = [ '/api/v1/verify/apikey/', '/api/v1/chatflows/apikey/', @@ -19,10 +24,67 @@ export const WHITELIST_URLS = [ '/api/v1/version', '/api/v1/attachments', '/api/v1/metrics', - '/api/v1/nvidia-nim' + '/api/v1/nvidia-nim', + '/api/v1/auth/resolve', + '/api/v1/auth/login', + '/api/v1/auth/refreshToken', + '/api/v1/settings', + '/api/v1/account/logout', + '/api/v1/account/verify', + '/api/v1/account/register', + '/api/v1/account/resend-verification', + '/api/v1/account/forgot-password', + '/api/v1/account/reset-password', + '/api/v1/account/basic-auth', + '/api/v1/loginmethod', + '/api/v1/pricing', + '/api/v1/user/test', + '/api/v1/oauth2-credential/callback', + '/api/v1/oauth2-credential/refresh', + '/api/v1/text-to-speech/generate', + '/api/v1/text-to-speech/abort', + AzureSSO.LOGIN_URI, + AzureSSO.LOGOUT_URI, + AzureSSO.CALLBACK_URI, + GoogleSSO.LOGIN_URI, + GoogleSSO.LOGOUT_URI, + GoogleSSO.CALLBACK_URI, + Auth0SSO.LOGIN_URI, + Auth0SSO.LOGOUT_URI, + Auth0SSO.CALLBACK_URI, + GithubSSO.LOGIN_URI, + GithubSSO.LOGOUT_URI, + GithubSSO.CALLBACK_URI ] -export const OMIT_QUEUE_JOB_DATA = ['componentNodes', 'appDataSource', 'sseStreamer', 'telemetry', 'cachePool'] +export const enum 
GeneralErrorMessage { + UNAUTHORIZED = 'Unauthorized', + UNHANDLED_EDGE_CASE = 'Unhandled Edge Case', + INVALID_PASSWORD = 'Invalid Password', + NOT_ALLOWED_TO_DELETE_OWNER = 'Not Allowed To Delete Owner', + INTERNAL_SERVER_ERROR = 'Internal Server Error' +} + +export const enum GeneralSuccessMessage { + CREATED = 'Resource Created Successful', + UPDATED = 'Resource Updated Successful', + DELETED = 'Resource Deleted Successful', + FETCHED = 'Resource Fetched Successful', + LOGGED_IN = 'Login Successful', + LOGGED_OUT = 'Logout Successful' +} + +export const DOCUMENT_STORE_BASE_FOLDER = 'docustore' + +export const OMIT_QUEUE_JOB_DATA = [ + 'componentNodes', + 'appDataSource', + 'sseStreamer', + 'telemetry', + 'cachePool', + 'usageCacheManager', + 'abortControllerPool' +] export const INPUT_PARAMS_TYPE = [ 'asyncOptions', @@ -42,3 +104,13 @@ export const INPUT_PARAMS_TYPE = [ 'folder', 'tabs' ] + +export const LICENSE_QUOTAS = { + // Renew per month + PREDICTIONS_LIMIT: 'quota:predictions', + // Static + FLOWS_LIMIT: 'quota:flows', + USERS_LIMIT: 'quota:users', + STORAGE_LIMIT: 'quota:storage', + ADDITIONAL_SEATS_LIMIT: 'quota:additionalSeats' +} as const diff --git a/packages/server/src/utils/createAttachment.ts b/packages/server/src/utils/createAttachment.ts index 8c9bb9958..10495140d 100644 --- a/packages/server/src/utils/createAttachment.ts +++ b/packages/server/src/utils/createAttachment.ts @@ -12,9 +12,12 @@ import { } from 'flowise-components' import { getRunningExpressApp } from './getRunningExpressApp' import { getErrorMessage } from '../errors/utils' +import { checkStorage, updateStorageUsage } from './quotaUsage' +import { ChatFlow } from '../database/entities/ChatFlow' +import { Workspace } from '../enterprise/database/entities/workspace.entity' +import { Organization } from '../enterprise/database/entities/organization.entity' import { InternalFlowiseError } from '../errors/internalFlowiseError' import { StatusCodes } from 'http-status-codes' -import { 
ChatFlow } from '../database/entities/ChatFlow' /** * Create attachment @@ -24,17 +27,12 @@ export const createFileAttachment = async (req: Request) => { const appServer = getRunningExpressApp() const chatflowid = req.params.chatflowId + const chatId = req.params.chatId + if (!chatflowid || !isValidUUID(chatflowid)) { throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, 'Invalid chatflowId format - must be a valid UUID') } - - const chatId = req.params.chatId - if (!chatId || !isValidUUID(chatId)) { - throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, 'Invalid chatId format - must be a valid UUID') - } - - // Check for path traversal attempts - if (isPathTraversal(chatflowid) || isPathTraversal(chatId)) { + if (isPathTraversal(chatflowid) || (chatId && isPathTraversal(chatId))) { throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, 'Invalid path characters detected') } @@ -46,28 +44,71 @@ export const createFileAttachment = async (req: Request) => { throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Chatflow ${chatflowid} not found`) } + let orgId = req.user?.activeOrganizationId || '' + let workspaceId = req.user?.activeWorkspaceId || '' + let subscriptionId = req.user?.activeOrganizationSubscriptionId || '' + + // This is one of the WHITELIST_URLS, API can be public and there might be no req.user + if (!orgId || !workspaceId) { + const chatflowWorkspaceId = chatflow.workspaceId + const workspace = await appServer.AppDataSource.getRepository(Workspace).findOneBy({ + id: chatflowWorkspaceId + }) + if (!workspace) { + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Workspace ${chatflowWorkspaceId} not found`) + } + workspaceId = workspace.id + + const org = await appServer.AppDataSource.getRepository(Organization).findOneBy({ + id: workspace.organizationId + }) + if (!org) { + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Organization ${workspace.organizationId} not found`) + } + + orgId = org.id + subscriptionId = org.subscriptionId 
as string + } + // Parse chatbot configuration to get file upload settings let pdfConfig = { usage: 'perPage', legacyBuild: false } + let allowedFileTypes: string[] = [] + let fileUploadEnabled = false if (chatflow.chatbotConfig) { try { const chatbotConfig = JSON.parse(chatflow.chatbotConfig) - if (chatbotConfig?.fullFileUpload?.pdfFile) { - if (chatbotConfig.fullFileUpload.pdfFile.usage) { - pdfConfig.usage = chatbotConfig.fullFileUpload.pdfFile.usage + if (chatbotConfig?.fullFileUpload) { + fileUploadEnabled = chatbotConfig.fullFileUpload.status + + // Get allowed file types from configuration + if (chatbotConfig.fullFileUpload.allowedUploadFileTypes) { + allowedFileTypes = chatbotConfig.fullFileUpload.allowedUploadFileTypes.split(',') } - if (chatbotConfig.fullFileUpload.pdfFile.legacyBuild !== undefined) { - pdfConfig.legacyBuild = chatbotConfig.fullFileUpload.pdfFile.legacyBuild + + // PDF specific configuration + if (chatbotConfig.fullFileUpload.pdfFile) { + if (chatbotConfig.fullFileUpload.pdfFile.usage) { + pdfConfig.usage = chatbotConfig.fullFileUpload.pdfFile.usage + } + if (chatbotConfig.fullFileUpload.pdfFile.legacyBuild !== undefined) { + pdfConfig.legacyBuild = chatbotConfig.fullFileUpload.pdfFile.legacyBuild + } } } } catch (e) { - // Use default PDF config if parsing fails + // Use default config if parsing fails } } + // Check if file upload is enabled + if (!fileUploadEnabled) { + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, 'File upload is not enabled for this chatflow') + } + // Find FileLoader node const fileLoaderComponent = appServer.nodesPool.componentNodes['fileLoader'] const fileLoaderNodeInstanceFilePath = fileLoaderComponent.filePath as string @@ -75,6 +116,8 @@ export const createFileAttachment = async (req: Request) => { const fileLoaderNodeInstance = new fileLoaderNodeModule.nodeClass() const options = { retrieveAttachmentChatId: true, + orgId, + workspaceId, chatflowid, chatId } @@ -83,13 +126,37 @@ export const 
createFileAttachment = async (req: Request) => { if (files.length) { const isBase64 = req.body.base64 for (const file of files) { + if (!allowedFileTypes.length) { + throw new InternalFlowiseError( + StatusCodes.BAD_REQUEST, + `File type '${file.mimetype}' is not allowed. Allowed types: ${allowedFileTypes.join(', ')}` + ) + } + + // Validate file type against allowed types + if (allowedFileTypes.length > 0 && !allowedFileTypes.includes(file.mimetype)) { + throw new InternalFlowiseError( + StatusCodes.BAD_REQUEST, + `File type '${file.mimetype}' is not allowed. Allowed types: ${allowedFileTypes.join(', ')}` + ) + } + + await checkStorage(orgId, subscriptionId, appServer.usageCacheManager) + const fileBuffer = await getFileFromUpload(file.path ?? file.key) const fileNames: string[] = [] - // Address file name with special characters: https://github.com/expressjs/multer/issues/1104 file.originalname = Buffer.from(file.originalname, 'latin1').toString('utf8') - - const storagePath = await addArrayFilesToStorage(file.mimetype, fileBuffer, file.originalname, fileNames, chatflowid, chatId) + const { path: storagePath, totalSize } = await addArrayFilesToStorage( + file.mimetype, + fileBuffer, + file.originalname, + fileNames, + orgId, + chatflowid, + chatId + ) + await updateStorageUsage(orgId, workspaceId, totalSize, appServer.usageCacheManager) const fileInputFieldFromMimeType = mapMimeTypeToInputField(file.mimetype) @@ -137,7 +204,7 @@ export const createFileAttachment = async (req: Request) => { content }) } catch (error) { - throw new Error(`Failed operation: createFileAttachment - ${getErrorMessage(error)}`) + throw new Error(`Failed createFileAttachment: ${file.originalname} (${file.mimetype} - ${getErrorMessage(error)}`) } } } diff --git a/packages/server/src/utils/domainValidation.ts b/packages/server/src/utils/domainValidation.ts new file mode 100644 index 000000000..a2482d8bb --- /dev/null +++ b/packages/server/src/utils/domainValidation.ts @@ -0,0 +1,109 @@ 
+import { isValidUUID } from 'flowise-components' +import chatflowsService from '../services/chatflows' +import logger from './logger' + +/** + * Validates if the origin is allowed for a specific chatflow + * @param chatflowId - The chatflow ID to validate against + * @param origin - The origin URL to validate + * @param workspaceId - Optional workspace ID for enterprise features + * @returns Promise - True if domain is allowed, false otherwise + */ +async function validateChatflowDomain(chatflowId: string, origin: string, workspaceId?: string): Promise { + try { + if (!chatflowId || !isValidUUID(chatflowId)) { + throw new Error('Invalid chatflowId format - must be a valid UUID') + } + + const chatflow = workspaceId + ? await chatflowsService.getChatflowById(chatflowId, workspaceId) + : await chatflowsService.getChatflowById(chatflowId) + + if (!chatflow?.chatbotConfig) { + return true + } + + const config = JSON.parse(chatflow.chatbotConfig) + + // If no allowed origins configured or first entry is empty, allow all + if (!config.allowedOrigins?.length || config.allowedOrigins[0] === '') { + return true + } + + const originHost = new URL(origin).host + const isAllowed = config.allowedOrigins.some((domain: string) => { + try { + const allowedOrigin = new URL(domain).host + return originHost === allowedOrigin + } catch (error) { + logger.warn(`Invalid domain format in allowedOrigins: ${domain}`) + return false + } + }) + + return isAllowed + } catch (error) { + logger.error(`Error validating domain for chatflow ${chatflowId}:`, error) + return false + } +} + +// NOTE: This function extracts the chatflow ID from a prediction URL. +// It assumes the URL format is /prediction/{chatflowId}. 
+/** + * Extracts chatflow ID from prediction URL + * @param url - The request URL + * @returns string | null - The chatflow ID or null if not found + */ +function extractChatflowId(url: string): string | null { + try { + const urlParts = url.split('/') + const predictionIndex = urlParts.indexOf('prediction') + + if (predictionIndex !== -1 && urlParts.length > predictionIndex + 1) { + const chatflowId = urlParts[predictionIndex + 1] + // Remove query parameters if present + return chatflowId.split('?')[0] + } + + return null + } catch (error) { + logger.error('Error extracting chatflow ID from URL:', error) + return null + } +} + +/** + * Validates if a request is a prediction request + * @param url - The request URL + * @returns boolean - True if it's a prediction request + */ +function isPredictionRequest(url: string): boolean { + return url.includes('/prediction/') +} + +/** + * Get the custom error message for unauthorized origin + * @param chatflowId - The chatflow ID + * @param workspaceId - Optional workspace ID + * @returns Promise - Custom error message or default + */ +async function getUnauthorizedOriginError(chatflowId: string, workspaceId?: string): Promise { + try { + const chatflow = workspaceId + ? 
await chatflowsService.getChatflowById(chatflowId, workspaceId) + : await chatflowsService.getChatflowById(chatflowId) + + if (chatflow?.chatbotConfig) { + const config = JSON.parse(chatflow.chatbotConfig) + return config.allowedOriginsError || 'This site is not allowed to access this chatbot' + } + + return 'This site is not allowed to access this chatbot' + } catch (error) { + logger.error(`Error getting unauthorized origin error for chatflow ${chatflowId}:`, error) + return 'This site is not allowed to access this chatbot' + } +} + +export { isPredictionRequest, extractChatflowId, validateChatflowDomain, getUnauthorizedOriginError } diff --git a/packages/server/src/utils/executeCustomNodeFunction.ts b/packages/server/src/utils/executeCustomNodeFunction.ts index a22a0291a..b0da4bf9c 100644 --- a/packages/server/src/utils/executeCustomNodeFunction.ts +++ b/packages/server/src/utils/executeCustomNodeFunction.ts @@ -9,11 +9,15 @@ import { IComponentNodes } from '../Interface' export const executeCustomNodeFunction = async ({ appDataSource, componentNodes, - data + data, + workspaceId, + orgId }: { appDataSource: DataSource componentNodes: IComponentNodes data: any + workspaceId?: string + orgId?: string }) => { try { const body = data @@ -37,7 +41,9 @@ export const executeCustomNodeFunction = async ({ const options: ICommonObject = { appDataSource, - databaseEntities + databaseEntities, + workspaceId, + orgId } const returnData = await newNodeInstance.init(nodeData, '', options) diff --git a/packages/server/src/utils/fileRepository.ts b/packages/server/src/utils/fileRepository.ts index 1147aeb98..2e017c9fd 100644 --- a/packages/server/src/utils/fileRepository.ts +++ b/packages/server/src/utils/fileRepository.ts @@ -1,6 +1,8 @@ import { ChatFlow } from '../database/entities/ChatFlow' import { IReactFlowObject } from '../Interface' import { addBase64FilesToStorage } from 'flowise-components' +import { checkStorage, updateStorageUsage } from './quotaUsage' +import { 
UsageCacheManager } from '../UsageCacheManager' export const containsBase64File = (chatflow: ChatFlow) => { const parsedFlowData: IReactFlowObject = JSON.parse(chatflow.flowData) @@ -46,11 +48,19 @@ export const containsBase64File = (chatflow: ChatFlow) => { return found } -export const updateFlowDataWithFilePaths = async (chatflowid: string, flowData: string) => { +export const updateFlowDataWithFilePaths = async ( + chatflowid: string, + flowData: string, + orgId: string, + workspaceId: string, + subscriptionId: string, + usageCacheManager: UsageCacheManager +) => { try { const parsedFlowData: IReactFlowObject = JSON.parse(flowData) const re = new RegExp('^data.*;base64', 'i') const nodes = parsedFlowData.nodes + for (let j = 0; j < nodes.length; j++) { const node = nodes[j] if (node.data.category !== 'Document Loaders') { @@ -75,21 +85,26 @@ export const updateFlowDataWithFilePaths = async (chatflowid: string, flowData: for (let j = 0; j < files.length; j++) { const file = files[j] if (re.test(file)) { - node.data.inputs[key] = await addBase64FilesToStorage(file, chatflowid, fileNames) + await checkStorage(orgId, subscriptionId, usageCacheManager) + const { path, totalSize } = await addBase64FilesToStorage(file, chatflowid, fileNames, orgId) + node.data.inputs[key] = path + await updateStorageUsage(orgId, workspaceId, totalSize, usageCacheManager) } } } catch (e) { continue } } else if (re.test(input)) { - node.data.inputs[key] = await addBase64FilesToStorage(input, chatflowid, fileNames) + await checkStorage(orgId, subscriptionId, usageCacheManager) + const { path, totalSize } = await addBase64FilesToStorage(input, chatflowid, fileNames, orgId) + node.data.inputs[key] = path + await updateStorageUsage(orgId, workspaceId, totalSize, usageCacheManager) } } } } - return JSON.stringify(parsedFlowData) - } catch (e) { - return '' + } catch (e: any) { + throw new Error(`Error updating flow data with file paths: ${e.message}`) } } diff --git 
a/packages/server/src/utils/getChatMessage.ts b/packages/server/src/utils/getChatMessage.ts index 9d8726f0e..57b835f9e 100644 --- a/packages/server/src/utils/getChatMessage.ts +++ b/packages/server/src/utils/getChatMessage.ts @@ -2,8 +2,8 @@ import { MoreThanOrEqual, LessThanOrEqual, Between, In } from 'typeorm' import { ChatMessageRatingType, ChatType } from '../Interface' import { ChatMessage } from '../database/entities/ChatMessage' import { ChatMessageFeedback } from '../database/entities/ChatMessageFeedback' +import { ChatFlow } from '../database/entities/ChatFlow' import { getRunningExpressApp } from '../utils/getRunningExpressApp' -import { aMonthAgo } from '.' /** * Method that get chat messages. @@ -18,6 +18,7 @@ import { aMonthAgo } from '.' * @param {boolean} feedback * @param {ChatMessageRatingType[]} feedbackTypes */ + interface GetChatMessageParams { chatflowid: string chatTypes?: ChatType[] @@ -30,6 +31,9 @@ interface GetChatMessageParams { messageId?: string feedback?: boolean feedbackTypes?: ChatMessageRatingType[] + activeWorkspaceId?: string + page?: number + pageSize?: number } export const utilGetChatMessage = async ({ @@ -43,65 +47,49 @@ export const utilGetChatMessage = async ({ endDate, messageId, feedback, - feedbackTypes + feedbackTypes, + activeWorkspaceId, + page = -1, + pageSize = -1 }: GetChatMessageParams): Promise => { + if (!page) page = -1 + if (!pageSize) pageSize = -1 + const appServer = getRunningExpressApp() + // Check if chatflow workspaceId is same as activeWorkspaceId + if (activeWorkspaceId) { + const chatflow = await appServer.AppDataSource.getRepository(ChatFlow).findOneBy({ + id: chatflowid, + workspaceId: activeWorkspaceId + }) + if (!chatflow) { + throw new Error('Unauthorized access') + } + } else { + throw new Error('Unauthorized access') + } + if (feedback) { - const query = await appServer.AppDataSource.getRepository(ChatMessage).createQueryBuilder('chat_message') - - // do the join with chat message feedback based 
on messageId for each chat message in the chatflow - query - .leftJoinAndSelect('chat_message.execution', 'execution') - .leftJoinAndMapOne('chat_message.feedback', ChatMessageFeedback, 'feedback', 'feedback.messageId = chat_message.id') - .where('chat_message.chatflowid = :chatflowid', { chatflowid }) - - // based on which parameters are available add `andWhere` clauses to the query - if (chatTypes && chatTypes.length > 0) { - query.andWhere('chat_message.chatType IN (:...chatTypes)', { chatTypes }) - } - if (chatId) { - query.andWhere('chat_message.chatId = :chatId', { chatId }) - } - if (memoryType) { - query.andWhere('chat_message.memoryType = :memoryType', { memoryType }) - } - if (sessionId) { - query.andWhere('chat_message.sessionId = :sessionId', { sessionId }) - } - - // set date range - if (startDate) { - query.andWhere('chat_message.createdDate >= :startDateTime', { startDateTime: startDate ? new Date(startDate) : aMonthAgo() }) - } - if (endDate) { - query.andWhere('chat_message.createdDate <= :endDateTime', { endDateTime: endDate ? new Date(endDate) : new Date() }) - } - - // sort - query.orderBy('chat_message.createdDate', sortOrder === 'DESC' ? 'DESC' : 'ASC') - - const messages = (await query.getMany()) as Array - - if (feedbackTypes && feedbackTypes.length > 0) { - // just applying a filter to the messages array will only return the messages that have feedback, - // but we also want the message before the feedback message which is the user message. 
- const indicesToKeep = new Set() - - messages.forEach((message, index) => { - if (message.role === 'apiMessage' && message.feedback && feedbackTypes.includes(message.feedback.rating)) { - if (index > 0) indicesToKeep.add(index - 1) - indicesToKeep.add(index) - } - }) - - return messages.filter((_, index) => indicesToKeep.has(index)) - } - - return messages + // Handle feedback queries with improved efficiency + return await handleFeedbackQuery({ + chatflowid, + chatTypes, + sortOrder, + chatId, + memoryType, + sessionId, + startDate, + endDate, + messageId, + feedbackTypes, + page, + pageSize + }) } let createdDateQuery + if (startDate || endDate) { if (startDate && endDate) { createdDateQuery = Between(new Date(startDate), new Date(endDate)) @@ -112,7 +100,7 @@ export const utilGetChatMessage = async ({ } } - return await appServer.AppDataSource.getRepository(ChatMessage).find({ + const messages = await appServer.AppDataSource.getRepository(ChatMessage).find({ where: { chatflowid, chatType: chatTypes?.length ? In(chatTypes) : undefined, @@ -129,4 +117,229 @@ export const utilGetChatMessage = async ({ createdDate: sortOrder === 'DESC' ? 
'DESC' : 'ASC' } }) + + return messages +} + +async function handleFeedbackQuery(params: { + chatflowid: string + chatTypes?: ChatType[] + sortOrder: string + chatId?: string + memoryType?: string + sessionId?: string + startDate?: string + endDate?: string + messageId?: string + feedbackTypes?: ChatMessageRatingType[] + page: number + pageSize: number +}): Promise { + const { + chatflowid, + chatTypes, + sortOrder, + chatId, + memoryType, + sessionId, + startDate, + endDate, + messageId, + feedbackTypes, + page, + pageSize + } = params + + const appServer = getRunningExpressApp() + + // For specific session/message queries, no pagination needed + if (sessionId || messageId) { + return await getMessagesWithFeedback(params, false) + } + + // For paginated queries, handle session-based pagination efficiently + if (page > -1 && pageSize > -1) { + // First get session IDs with pagination + const sessionQuery = appServer.AppDataSource.getRepository(ChatMessage) + .createQueryBuilder('chat_message') + .select('chat_message.sessionId', 'sessionId') + .where('chat_message.chatflowid = :chatflowid', { chatflowid }) + + // Apply basic filters + if (chatTypes && chatTypes.length > 0) { + sessionQuery.andWhere('chat_message.chatType IN (:...chatTypes)', { chatTypes }) + } + if (chatId) { + sessionQuery.andWhere('chat_message.chatId = :chatId', { chatId }) + } + if (memoryType) { + sessionQuery.andWhere('chat_message.memoryType = :memoryType', { memoryType }) + } + if (startDate && typeof startDate === 'string') { + sessionQuery.andWhere('chat_message.createdDate >= :startDateTime', { + startDateTime: new Date(startDate) + }) + } + if (endDate && typeof endDate === 'string') { + sessionQuery.andWhere('chat_message.createdDate <= :endDateTime', { + endDateTime: new Date(endDate) + }) + } + + // If feedback types are specified, only get sessions with those feedback types + if (feedbackTypes && feedbackTypes.length > 0) { + sessionQuery + .leftJoin(ChatMessageFeedback, 'feedback', 
'feedback.messageId = chat_message.id') + .andWhere('feedback.rating IN (:...feedbackTypes)', { feedbackTypes }) + } + + const startIndex = pageSize * (page - 1) + const sessionIds = await sessionQuery + .orderBy('MAX(chat_message.createdDate)', sortOrder === 'DESC' ? 'DESC' : 'ASC') + .groupBy('chat_message.sessionId') + .offset(startIndex) + .limit(pageSize) + .getRawMany() + + if (sessionIds.length === 0) { + return [] + } + + // Get all messages for these sessions + const sessionIdList = sessionIds.map((s) => s.sessionId) + return await getMessagesWithFeedback( + { + ...params, + sessionId: undefined // Clear specific sessionId since we're using list + }, + true, + sessionIdList + ) + } + + // No pagination - get all feedback messages + return await getMessagesWithFeedback(params, false) +} + +async function getMessagesWithFeedback( + params: { + chatflowid: string + chatTypes?: ChatType[] + sortOrder: string + chatId?: string + memoryType?: string + sessionId?: string + startDate?: string + endDate?: string + messageId?: string + feedbackTypes?: ChatMessageRatingType[] + }, + useSessionList: boolean = false, + sessionIdList?: string[] +): Promise { + const { chatflowid, chatTypes, sortOrder, chatId, memoryType, sessionId, startDate, endDate, messageId, feedbackTypes } = params + + const appServer = getRunningExpressApp() + const query = appServer.AppDataSource.getRepository(ChatMessage).createQueryBuilder('chat_message') + + query + .leftJoinAndSelect('chat_message.execution', 'execution') + .leftJoinAndMapOne('chat_message.feedback', ChatMessageFeedback, 'feedback', 'feedback.messageId = chat_message.id') + .where('chat_message.chatflowid = :chatflowid', { chatflowid }) + + // Apply filters + if (useSessionList && sessionIdList && sessionIdList.length > 0) { + query.andWhere('chat_message.sessionId IN (:...sessionIds)', { sessionIds: sessionIdList }) + } + + if (chatTypes && chatTypes.length > 0) { + query.andWhere('chat_message.chatType IN (:...chatTypes)', 
{ chatTypes }) + } + if (chatId) { + query.andWhere('chat_message.chatId = :chatId', { chatId }) + } + if (memoryType) { + query.andWhere('chat_message.memoryType = :memoryType', { memoryType }) + } + if (sessionId) { + query.andWhere('chat_message.sessionId = :sessionId', { sessionId }) + } + if (messageId) { + query.andWhere('chat_message.id = :messageId', { messageId }) + } + if (startDate && typeof startDate === 'string') { + query.andWhere('chat_message.createdDate >= :startDateTime', { + startDateTime: new Date(startDate) + }) + } + if (endDate && typeof endDate === 'string') { + query.andWhere('chat_message.createdDate <= :endDateTime', { + endDateTime: new Date(endDate) + }) + } + + // Pre-filter by feedback types if specified (more efficient than post-processing) + if (feedbackTypes && feedbackTypes.length > 0) { + query.andWhere('(feedback.rating IN (:...feedbackTypes) OR feedback.rating IS NULL)', { feedbackTypes }) + } + + query.orderBy('chat_message.createdDate', sortOrder === 'DESC' ? 
'DESC' : 'ASC') + + const messages = (await query.getMany()) as Array + + // Apply feedback type filtering with previous message inclusion + if (feedbackTypes && feedbackTypes.length > 0) { + return filterMessagesWithFeedback(messages, feedbackTypes) + } + + return messages +} + +function filterMessagesWithFeedback( + messages: Array, + feedbackTypes: ChatMessageRatingType[] +): ChatMessage[] { + // Group messages by session for proper filtering + const sessionGroups = new Map>() + + messages.forEach((message) => { + const sessionId = message.sessionId + if (!sessionId) return // Skip messages without sessionId + + if (!sessionGroups.has(sessionId)) { + sessionGroups.set(sessionId, []) + } + sessionGroups.get(sessionId)!.push(message) + }) + + const result: ChatMessage[] = [] + + // Process each session group + sessionGroups.forEach((sessionMessages) => { + // Sort by creation date to ensure proper order + sessionMessages.sort((a, b) => new Date(a.createdDate).getTime() - new Date(b.createdDate).getTime()) + + const toInclude = new Set() + + sessionMessages.forEach((message, index) => { + if (message.role === 'apiMessage' && message.feedback && feedbackTypes.includes(message.feedback.rating)) { + // Include the feedback message + toInclude.add(index) + // Include the previous message (user message) if it exists + if (index > 0) { + toInclude.add(index - 1) + } + } + }) + + // Add filtered messages to result + sessionMessages.forEach((message, index) => { + if (toInclude.has(index)) { + result.push(message) + } + }) + }) + + // Sort final result by creation date + return result.sort((a, b) => new Date(a.createdDate).getTime() - new Date(b.createdDate).getTime()) } diff --git a/packages/server/src/utils/index.ts b/packages/server/src/utils/index.ts index 48303a911..f0b1b35df 100644 --- a/packages/server/src/utils/index.ts +++ b/packages/server/src/utils/index.ts @@ -5,6 +5,7 @@ import path from 'path' import fs from 'fs' import logger from './logger' +import { v4 as 
uuidv4 } from 'uuid' import { IChatFlow, IComponentCredentials, @@ -68,6 +69,8 @@ export const QUESTION_VAR_PREFIX = 'question' export const FILE_ATTACHMENT_PREFIX = 'file_attachment' export const CHAT_HISTORY_VAR_PREFIX = 'chat_history' export const RUNTIME_MESSAGES_LENGTH_VAR_PREFIX = 'runtime_messages_length' +export const LOOP_COUNT_VAR_PREFIX = 'loop_count' +export const CURRENT_DATE_TIME_VAR_PREFIX = 'current_date_time' export const REDACTED_CREDENTIAL_VALUE = '_FLOWISE_BLANK_07167752-1a71-43b1-bf8f-4f32252165db' let secretsManagerClient: SecretsManagerClient | null = null @@ -203,6 +206,22 @@ export const constructGraphs = ( return { graph, nodeDependencies } } +/** + * Get starting node and check if flow is valid + * @param {INodeDependencies} nodeDependencies + */ +export const getStartingNode = (nodeDependencies: INodeDependencies) => { + // Find starting node + const startingNodeIds = [] as string[] + Object.keys(nodeDependencies).forEach((nodeId) => { + if (nodeDependencies[nodeId] === 0) { + startingNodeIds.push(nodeId) + } + }) + + return { startingNodeIds } +} + /** * Get starting nodes and check if flow is valid * @param {INodeDependencies} graph @@ -239,22 +258,6 @@ export const getStartingNodes = (graph: INodeDirectedGraph, endNodeId: string) = return { startingNodeIds, depthQueue: depthQueueReversed } } -/** - * Get starting node and check if flow is valid - * @param {INodeDependencies} nodeDependencies - */ -export const getStartingNode = (nodeDependencies: INodeDependencies) => { - // Find starting node - const startingNodeIds = [] as string[] - Object.keys(nodeDependencies).forEach((nodeId) => { - if (nodeDependencies[nodeId] === 0) { - startingNodeIds.push(nodeId) - } - }) - - return { startingNodeIds } -} - /** * Get all connected nodes from startnode * @param {INodeDependencies} graph @@ -497,7 +500,13 @@ type BuildFlowParams = { stopNodeId?: string uploads?: IFileUpload[] baseURL?: string + orgId?: string + workspaceId?: string + 
subscriptionId?: string + usageCacheManager?: any uploadedFilesContent?: string + updateStorageUsage?: (orgId: string, workspaceId: string, totalSize: number, usageCacheManager?: any) => void + checkStorage?: (orgId: string, subscriptionId: string, usageCacheManager: any) => Promise } /** @@ -528,7 +537,13 @@ export const buildFlow = async ({ isUpsert, stopNodeId, uploads, - baseURL + baseURL, + orgId, + workspaceId, + subscriptionId, + usageCacheManager, + updateStorageUsage, + checkStorage }: BuildFlowParams) => { const flowNodes = cloneDeep(reactFlowNodes) @@ -591,8 +606,11 @@ export const buildFlow = async ({ ) if (isUpsert && stopNodeId && nodeId === stopNodeId) { - logger.debug(`[server]: Upserting ${reactFlowNode.data.label} (${reactFlowNode.data.id})`) + logger.debug(`[server]: [${orgId}]: Upserting ${reactFlowNode.data.label} (${reactFlowNode.data.id})`) const indexResult = await newNodeInstance.vectorStoreMethods!['upsert']!.call(newNodeInstance, reactFlowNodeData, { + orgId, + workspaceId, + subscriptionId, chatId, sessionId, chatflowid, @@ -602,12 +620,13 @@ export const buildFlow = async ({ appDataSource, databaseEntities, cachePool, + usageCacheManager, dynamicVariables, uploads, baseURL }) if (indexResult) upsertHistory['result'] = indexResult - logger.debug(`[server]: Finished upserting ${reactFlowNode.data.label} (${reactFlowNode.data.id})`) + logger.debug(`[server]: [${orgId}]: Finished upserting ${reactFlowNode.data.label} (${reactFlowNode.data.id})`) break } else if ( !isUpsert && @@ -616,9 +635,12 @@ export const buildFlow = async ({ ) { initializedNodes.add(nodeId) } else { - logger.debug(`[server]: Initializing ${reactFlowNode.data.label} (${reactFlowNode.data.id})`) + logger.debug(`[server]: [${orgId}]: Initializing ${reactFlowNode.data.label} (${reactFlowNode.data.id})`) const finalQuestion = uploadedFilesContent ? 
`${uploadedFilesContent}\n\n${question}` : question let outputResult = await newNodeInstance.init(reactFlowNodeData, finalQuestion, { + orgId, + workspaceId, + subscriptionId, chatId, sessionId, chatflowid, @@ -627,11 +649,14 @@ export const buildFlow = async ({ appDataSource, databaseEntities, cachePool, + usageCacheManager, isUpsert, dynamicVariables, uploads, baseURL, - componentNodes: componentNodes as ICommonObject + componentNodes, + updateStorageUsage, + checkStorage }) // Save dynamic variables @@ -676,11 +701,11 @@ export const buildFlow = async ({ flowNodes[nodeIndex].data.instance = outputResult - logger.debug(`[server]: Finished initializing ${reactFlowNode.data.label} (${reactFlowNode.data.id})`) + logger.debug(`[server]: [${orgId}]: Finished initializing ${reactFlowNode.data.label} (${reactFlowNode.data.id})`) initializedNodes.add(reactFlowNode.data.id) } } catch (e: any) { - logger.error(e) + logger.error(`[server]: [${orgId}]:`, e) throw new Error(e) } @@ -744,6 +769,7 @@ export const clearSessionMemory = async ( componentNodes: IComponentNodes, chatId: string, appDataSource: DataSource, + orgId?: string, sessionId?: string, memoryType?: string, isClearFromViewMessageDialog?: string @@ -757,7 +783,7 @@ export const clearSessionMemory = async ( const nodeInstanceFilePath = componentNodes[node.data.name].filePath as string const nodeModule = await import(nodeInstanceFilePath) const newNodeInstance = new nodeModule.nodeClass() - const options: ICommonObject = { chatId, appDataSource, databaseEntities, logger } + const options: ICommonObject = { orgId, chatId, appDataSource, databaseEntities, logger } // SessionId always take priority first because it is the sessionId used for 3rd party memory node if (sessionId && node.data.inputs) { @@ -808,7 +834,8 @@ export const getGlobalVariable = async ( value: overrideConfig.vars[propertyName], id: '', updatedDate: new Date(), - createdDate: new Date() + createdDate: new Date(), + workspaceId: '' }) } } @@ 
-990,7 +1017,7 @@ export const getVariableValue = async ( } /** - * Loop through each inputs and resolve variable if neccessary + * Loop through each inputs and resolve variable if necessary * @param {INodeData} reactFlowNodeData * @param {IReactFlowNode[]} reactFlowNodes * @param {string} question @@ -1080,12 +1107,13 @@ export const replaceInputsWithConfig = ( * Several conditions: * 1. If config is 'analytics', always allow it * 2. If config is 'vars', check its object and filter out the variables that are not enabled for override - * 3. If typeof config's value is an object, check if the node id is in the overrideConfig object and if the parameter (systemMessagePrompt) is enabled + * 3. If typeof config's value is an array, check if the parameter is enabled and apply directly + * 4. If typeof config's value is an object, check if the node id is in the overrideConfig object and if the parameter (systemMessagePrompt) is enabled * Example: * "systemMessagePrompt": { * "chatPromptTemplate_0": "You are an assistant" * } - * 4. If typeof config's value is a string, check if the parameter is enabled + * 5. 
If typeof config's value is a string, check if the parameter is enabled * Example: * "systemMessagePrompt": "You are an assistant" */ @@ -1106,12 +1134,50 @@ export const replaceInputsWithConfig = ( } overrideConfig[config] = filteredVars } + } else if (Array.isArray(overrideConfig[config])) { + // Handle arrays as direct parameter values + if (isParameterEnabled(flowNodeData.label, config)) { + // If existing value is also an array, concatenate; otherwise replace + const existingValue = inputsObj[config] + if (Array.isArray(existingValue)) { + inputsObj[config] = [...new Set([...existingValue, ...overrideConfig[config]])] + } else { + inputsObj[config] = overrideConfig[config] + } + } + continue } else if (overrideConfig[config] && typeof overrideConfig[config] === 'object') { const nodeIds = Object.keys(overrideConfig[config]) if (nodeIds.includes(flowNodeData.id)) { // Check if this parameter is enabled if (isParameterEnabled(flowNodeData.label, config)) { - inputsObj[config] = overrideConfig[config][flowNodeData.id] + const existingValue = inputsObj[config] + const overrideValue = overrideConfig[config][flowNodeData.id] + + // Merge objects instead of completely overriding + if ( + typeof existingValue === 'object' && + typeof overrideValue === 'object' && + !Array.isArray(existingValue) && + !Array.isArray(overrideValue) && + existingValue !== null && + overrideValue !== null + ) { + inputsObj[config] = Object.assign({}, existingValue, overrideValue) + } else if (typeof existingValue === 'string' && existingValue.startsWith('{') && existingValue.endsWith('}')) { + try { + const parsedExisting = JSON.parse(existingValue) + if (typeof overrideValue === 'object' && !Array.isArray(overrideValue)) { + inputsObj[config] = Object.assign({}, parsedExisting, overrideValue) + } else { + inputsObj[config] = overrideValue + } + } catch (e) { + inputsObj[config] = overrideValue + } + } else { + inputsObj[config] = overrideValue + } } continue } else if 
(nodeIds.some((nodeId) => nodeId.includes(flowNodeData.name))) { @@ -1136,24 +1202,36 @@ export const replaceInputsWithConfig = ( const overrideConfigValue = overrideConfig[config] if (overrideConfigValue) { if (typeof overrideConfigValue === 'object') { - switch (typeof paramValue) { - case 'string': - if (paramValue.startsWith('{') && paramValue.endsWith('}')) { - try { - paramValue = Object.assign({}, JSON.parse(paramValue), overrideConfigValue) - break - } catch (e) { - // ignore + // Handle arrays specifically - concatenate instead of replace + if (Array.isArray(overrideConfigValue) && Array.isArray(paramValue)) { + paramValue = [...new Set([...paramValue, ...overrideConfigValue])] + } else if (Array.isArray(overrideConfigValue)) { + paramValue = overrideConfigValue + } else { + switch (typeof paramValue) { + case 'string': + if (paramValue.startsWith('{') && paramValue.endsWith('}')) { + try { + paramValue = Object.assign({}, JSON.parse(paramValue), overrideConfigValue) + break + } catch (e) { + // ignore + } } - } - paramValue = overrideConfigValue - break - case 'object': - paramValue = Object.assign({}, paramValue, overrideConfigValue) - break - default: - paramValue = overrideConfigValue - break + paramValue = overrideConfigValue + break + case 'object': + // Make sure we're not dealing with arrays here + if (!Array.isArray(paramValue)) { + paramValue = Object.assign({}, paramValue, overrideConfigValue) + } else { + paramValue = overrideConfigValue + } + break + default: + paramValue = overrideConfigValue + break + } } } else { paramValue = overrideConfigValue @@ -1261,7 +1339,6 @@ export const findAvailableConfigs = (reactFlowNodes: IReactFlowNode[], component for (const flowNode of reactFlowNodes) { for (const inputParam of flowNode.data.inputParams) { let obj: IOverrideConfig | undefined - if (inputParam.type === 'file') { obj = { node: flowNode.data.label, @@ -1303,11 +1380,10 @@ export const findAvailableConfigs = (reactFlowNodes: IReactFlowNode[], 
component } continue } else if (inputParam.type === 'array') { - // get array item schema const arrayItem = inputParam.array if (Array.isArray(arrayItem)) { - const arraySchema = [] - // Each array item is a field definition + const arrayItemSchema: Record = {} + // Build object schema representing the structure of each array item for (const item of arrayItem) { let itemType = item.type if (itemType === 'options') { @@ -1316,10 +1392,7 @@ export const findAvailableConfigs = (reactFlowNodes: IReactFlowNode[], component } else if (itemType === 'file') { itemType = item.fileType ?? item.type } - arraySchema.push({ - name: item.name, - type: itemType - }) + arrayItemSchema[item.name] = itemType } obj = { node: flowNode.data.label, @@ -1327,7 +1400,49 @@ export const findAvailableConfigs = (reactFlowNodes: IReactFlowNode[], component label: inputParam.label, name: inputParam.name, type: inputParam.type, - schema: arraySchema + schema: arrayItemSchema + } + } + } else if (inputParam.loadConfig) { + const configData = flowNode?.data?.inputs?.[`${inputParam.name}Config`] + if (configData) { + // Parse config data to extract schema + let parsedConfig: any = {} + try { + parsedConfig = typeof configData === 'string' ? 
JSON.parse(configData) : configData + } catch (e) { + // If parsing fails, treat as empty object + parsedConfig = {} + } + + // Generate schema from config structure + const configSchema: Record = {} + parsedConfig = _removeCredentialId(parsedConfig) + for (const key in parsedConfig) { + if (key === inputParam.name) continue + const value = parsedConfig[key] + let fieldType = 'string' // default type + + if (typeof value === 'boolean') { + fieldType = 'boolean' + } else if (typeof value === 'number') { + fieldType = 'number' + } else if (Array.isArray(value)) { + fieldType = 'array' + } else if (typeof value === 'object' && value !== null) { + fieldType = 'object' + } + + configSchema[key] = fieldType + } + + obj = { + node: flowNode.data.label, + nodeId: flowNode.data.id, + label: `${inputParam.label} Config`, + name: `${inputParam.name}Config`, + type: `json`, + schema: configSchema } } } else { @@ -1377,7 +1492,9 @@ export const isFlowValidForStream = (reactFlowNodes: IReactFlowNode[], endingNod 'chatTogetherAI', 'chatTogetherAI_LlamaIndex', 'chatFireworks', - 'chatBaiduWenxin' + 'ChatSambanova', + 'chatBaiduWenxin', + 'chatCometAPI' ], LLMs: ['azureOpenAI', 'openAI', 'ollama'] } @@ -1500,7 +1617,6 @@ export const decryptCredentialData = async ( if (USE_AWS_SECRETS_MANAGER && secretsManagerClient) { try { - logger.info(`[server]: Reading AWS Secret: ${encryptedData}`) if (encryptedData.startsWith('FlowiseCredential_')) { const command = new GetSecretValueCommand({ SecretId: encryptedData }) const response = await secretsManagerClient.send(command) @@ -1567,6 +1683,10 @@ export const transformToCredentialEntity = async (body: ICredentialReqBody): Pro const newCredential = new Credential() Object.assign(newCredential, credentialBody) + if (body.workspaceId) { + newCredential.workspaceId = body.workspaceId + } + return newCredential } @@ -1734,21 +1854,6 @@ export const getTelemetryFlowObj = (nodes: IReactFlowNode[], edges: IReactFlowEd return { nodes: nodeData, 
edges: edgeData } } -/** - * Get user settings file - * TODO: move env variables to settings json file, easier configuration - */ -export const getUserSettingsFilePath = () => { - if (process.env.SECRETKEY_PATH) return path.join(process.env.SECRETKEY_PATH, 'settings.json') - const checkPaths = [path.join(getUserHome(), '.flowise', 'settings.json')] - for (const checkPath of checkPaths) { - if (fs.existsSync(checkPath)) { - return checkPath - } - } - return '' -} - /** * Get app current version */ @@ -1815,14 +1920,8 @@ export const getUploadPath = (): string => { : path.join(getUserHome(), '.flowise', 'uploads') } -const getOrgId = () => { - const settingsContent = fs.readFileSync(getUserSettingsFilePath(), 'utf8') - try { - const settings = JSON.parse(settingsContent) - return settings.instanceId - } catch (error) { - return '' - } +export function generateId() { + return uuidv4() } export const getMulterStorage = () => { @@ -1837,10 +1936,10 @@ export const getMulterStorage = () => { s3: s3Client, bucket: Bucket, metadata: function (req, file, cb) { - cb(null, { fieldName: file.fieldname, originalName: file.originalname, orgId: getOrgId() }) + cb(null, { fieldName: file.fieldname, originalName: file.originalname }) }, key: function (req, file, cb) { - cb(null, `${getOrgId()}/${Date.now().toString()}`) + cb(null, `${generateId()}`) } }) }) @@ -1852,7 +1951,7 @@ export const getMulterStorage = () => { bucket: process.env.GOOGLE_CLOUD_STORAGE_BUCKET_NAME, keyFilename: process.env.GOOGLE_CLOUD_STORAGE_CREDENTIAL, uniformBucketLevelAccess: Boolean(process.env.GOOGLE_CLOUD_UNIFORM_BUCKET_ACCESS) ?? 
true, - destination: `uploads/${getOrgId()}` + destination: `uploads/${generateId()}` }) }) } else { @@ -1926,3 +2025,48 @@ export const getAllNodesInPath = (startNode: string, graph: INodeDirectedGraph): return Array.from(nodes) } + +export const _removeCredentialId = (obj: any): any => { + if (!obj || typeof obj !== 'object') return obj + + if (Array.isArray(obj)) { + return obj.map((item) => _removeCredentialId(item)) + } + + const newObj: Record = {} + for (const [key, value] of Object.entries(obj)) { + if (key === 'FLOWISE_CREDENTIAL_ID') continue + newObj[key] = _removeCredentialId(value) + } + return newObj +} + +/** + * Validates that history items follow the expected schema + * @param {any[]} history - Array of history items to validate + * @returns {boolean} - True if all items are valid, false otherwise + */ +export const validateHistorySchema = (history: any[]): boolean => { + if (!Array.isArray(history)) { + return false + } + + return history.every((item) => { + // Check if item is an object + if (typeof item !== 'object' || item === null) { + return false + } + + // Check if role exists and is valid + if (typeof item.role !== 'string' || !['apiMessage', 'userMessage'].includes(item.role)) { + return false + } + + // Check if content exists and is a string + if (typeof item.content !== 'string') { + return false + } + + return true + }) +} diff --git a/packages/server/src/utils/logger.ts b/packages/server/src/utils/logger.ts index 7ad5b58a4..a4b060a7d 100644 --- a/packages/server/src/utils/logger.ts +++ b/packages/server/src/utils/logger.ts @@ -4,6 +4,7 @@ import { hostname } from 'node:os' import config from './config' // should be replaced by node-config or similar import { createLogger, transports, format } from 'winston' import { NextFunction, Request, Response } from 'express' +import DailyRotateFile from 'winston-daily-rotate-file' import { S3ClientConfig } from '@aws-sdk/client-s3' import { LoggingWinston } from '@google-cloud/logging-winston' 
@@ -19,6 +20,8 @@ let gcsServerStream: any let gcsErrorStream: any let gcsServerReqStream: any +let requestLogger: any + if (process.env.STORAGE_TYPE === 's3') { const accessKeyId = process.env.S3_STORAGE_ACCESS_KEY_ID const secretAccessKey = process.env.S3_STORAGE_SECRET_ACCESS_KEY @@ -27,17 +30,21 @@ if (process.env.STORAGE_TYPE === 's3') { const customURL = process.env.S3_ENDPOINT_URL const forcePathStyle = process.env.S3_FORCE_PATH_STYLE === 'true' - if (!region || !s3Bucket) { + if (!region || region.trim() === '' || !s3Bucket || s3Bucket.trim() === '') { throw new Error('S3 storage configuration is missing') } const s3Config: S3ClientConfig = { region: region, - endpoint: customURL, forcePathStyle: forcePathStyle } - if (accessKeyId && secretAccessKey) { + // Only include endpoint if customURL is not empty + if (customURL && customURL.trim() !== '') { + s3Config.endpoint = customURL + } + + if (accessKeyId && accessKeyId.trim() !== '' && secretAccessKey && secretAccessKey.trim() !== '') { s3Config.credentials = { accessKeyId: accessKeyId, secretAccessKey: secretAccessKey @@ -110,17 +117,16 @@ const logger = createLogger({ defaultMeta: { package: 'server' }, + exitOnError: false, transports: [ new transports.Console(), ...(!process.env.STORAGE_TYPE || process.env.STORAGE_TYPE === 'local' ? [ - new transports.File({ - filename: path.join(logDir, config.logging.server.filename ?? 'server.log'), + new DailyRotateFile({ + filename: path.join(logDir, config.logging.server.filename ?? 'server-%DATE%.log'), + datePattern: 'YYYY-MM-DD-HH', + maxSize: '20m', level: config.logging.server.level ?? 'info' - }), - new transports.File({ - filename: path.join(logDir, config.logging.server.errorFilename ?? 'server-error.log'), - level: 'error' // Log only errors to this file }) ] : []), @@ -134,13 +140,7 @@ const logger = createLogger({ ...(process.env.STORAGE_TYPE === 'gcs' ? 
[gcsServerStream] : []) ], exceptionHandlers: [ - ...(!process.env.STORAGE_TYPE || process.env.STORAGE_TYPE === 'local' - ? [ - new transports.File({ - filename: path.join(logDir, config.logging.server.errorFilename ?? 'server-error.log') - }) - ] - : []), + ...(process.env.DEBUG && process.env.DEBUG === 'true' ? [new transports.Console()] : []), ...(process.env.STORAGE_TYPE === 's3' ? [ new transports.Stream({ @@ -151,13 +151,7 @@ const logger = createLogger({ ...(process.env.STORAGE_TYPE === 'gcs' ? [gcsErrorStream] : []) ], rejectionHandlers: [ - ...(!process.env.STORAGE_TYPE || process.env.STORAGE_TYPE === 'local' - ? [ - new transports.File({ - filename: path.join(logDir, config.logging.server.errorFilename ?? 'server-error.log') - }) - ] - : []), + ...(process.env.DEBUG && process.env.DEBUG === 'true' ? [new transports.Console()] : []), ...(process.env.STORAGE_TYPE === 's3' ? [ new transports.Stream({ @@ -165,45 +159,106 @@ const logger = createLogger({ }) ] : []), - ...(process.env.STORAGE_TYPE === 'gcs' ? [gcsErrorStream] : []) + ...(process.env.STORAGE_TYPE === 'gcs' ? [gcsErrorStream] : []), + // Always provide a fallback rejection handler when no other handlers are configured + ...((!process.env.DEBUG || process.env.DEBUG !== 'true') && process.env.STORAGE_TYPE !== 's3' && process.env.STORAGE_TYPE !== 'gcs' + ? [new transports.Console()] + : []) ] }) +requestLogger = createLogger({ + format: combine(timestamp({ format: 'YYYY-MM-DD HH:mm:ss' }), format.json(), errors({ stack: true })), + defaultMeta: { + package: 'server' + }, + transports: [ + ...(process.env.DEBUG && process.env.DEBUG === 'true' ? [new transports.Console()] : []), + ...(!process.env.STORAGE_TYPE || process.env.STORAGE_TYPE === 'local' + ? [ + new transports.File({ + filename: path.join(logDir, config.logging.express.filename ?? 'server-requests.log.jsonl'), + level: config.logging.express.level ?? 'debug' + }) + ] + : []), + ...(process.env.STORAGE_TYPE === 's3' + ? 
[ + new transports.Stream({ + stream: s3ServerReqStream + }) + ] + : []), + ...(process.env.STORAGE_TYPE === 'gcs' ? [gcsServerReqStream] : []) + ] +}) + +function getSensitiveBodyFields(): string[] { + if (!process.env.LOG_SANITIZE_BODY_FIELDS) return [] + return (process.env.LOG_SANITIZE_BODY_FIELDS as string) + .toLowerCase() + .split(',') + .map((f) => f.trim()) +} + +function getSensitiveHeaderFields(): string[] { + if (!process.env.LOG_SANITIZE_HEADER_FIELDS) return [] + return (process.env.LOG_SANITIZE_HEADER_FIELDS as string) + .toLowerCase() + .split(',') + .map((f) => f.trim()) +} + +function sanitizeObject(obj: any): any { + if (!obj || typeof obj !== 'object') return obj + + const sensitiveFields = getSensitiveBodyFields() + const sanitized = Array.isArray(obj) ? [...obj] : { ...obj } + Object.keys(sanitized).forEach((key) => { + const lowerKey = key.toLowerCase() + if (sensitiveFields.includes(lowerKey)) { + sanitized[key] = '********' + } else if (typeof sanitized[key] === 'string') { + if (sanitized[key].includes('@') && sanitized[key].includes('.')) { + sanitized[key] = sanitized[key].replace(/([^@\s]+)@([^@\s]+)/g, '**********') + } + } + }) + + return sanitized +} + export function expressRequestLogger(req: Request, res: Response, next: NextFunction): void { const unwantedLogURLs = ['/api/v1/node-icon/', '/api/v1/components-credentials-icon/', '/api/v1/ping'] + if (/\/api\/v1\//i.test(req.url) && !unwantedLogURLs.some((url) => new RegExp(url, 'i').test(req.url))) { - const fileLogger = createLogger({ - format: combine(timestamp({ format: 'YYYY-MM-DD HH:mm:ss' }), format.json(), errors({ stack: true })), - defaultMeta: { - package: 'server', - request: { - method: req.method, - url: req.url, - body: req.body, - query: req.query, - params: req.params, - headers: req.headers + const isDebugLevel = logger.level === 'debug' || process.env.DEBUG === 'true' + + const requestMetadata: any = { + request: { + method: req.method, + url: req.url, + params: 
req.params + } + } + + // Only include headers, body, and query if log level is debug + if (isDebugLevel) { + const sanitizedBody = sanitizeObject(req.body) + const sanitizedQuery = sanitizeObject(req.query) + const sanitizedHeaders = { ...req.headers } + + const sensitiveHeaders = getSensitiveHeaderFields() + sensitiveHeaders.forEach((header) => { + if (sanitizedHeaders[header]) { + sanitizedHeaders[header] = '********' } - }, - transports: [ - ...(!process.env.STORAGE_TYPE || process.env.STORAGE_TYPE === 'local' - ? [ - new transports.File({ - filename: path.join(logDir, config.logging.express.filename ?? 'server-requests.log.jsonl'), - level: config.logging.express.level ?? 'debug' - }) - ] - : []), - ...(process.env.STORAGE_TYPE === 's3' - ? [ - new transports.Stream({ - stream: s3ServerReqStream - }) - ] - : []), - ...(process.env.STORAGE_TYPE === 'gcs' ? [gcsServerReqStream] : []) - ] - }) + }) + + requestMetadata.request.body = sanitizedBody + requestMetadata.request.query = sanitizedQuery + requestMetadata.request.headers = sanitizedHeaders + } const getRequestEmoji = (method: string) => { const requetsEmojis: Record = { @@ -218,10 +273,10 @@ export function expressRequestLogger(req: Request, res: Response, next: NextFunc } if (req.method !== 'GET') { - fileLogger.info(`${getRequestEmoji(req.method)} ${req.method} ${req.url}`) + requestLogger.info(`${getRequestEmoji(req.method)} ${req.method} ${req.url}`, requestMetadata) logger.info(`${getRequestEmoji(req.method)} ${req.method} ${req.url}`) } else { - fileLogger.http(`${getRequestEmoji(req.method)} ${req.method} ${req.url}`) + requestLogger.http(`${getRequestEmoji(req.method)} ${req.method} ${req.url}`, requestMetadata) } } diff --git a/packages/server/src/utils/pagination.ts b/packages/server/src/utils/pagination.ts new file mode 100644 index 000000000..ae81933cd --- /dev/null +++ b/packages/server/src/utils/pagination.ts @@ -0,0 +1,29 @@ +import { InternalFlowiseError } from 
'../errors/internalFlowiseError' +import { StatusCodes } from 'http-status-codes' +import { Request } from 'express' + +type Pagination = { + page: number + limit: number +} + +export const getPageAndLimitParams = (req: Request): Pagination => { + // by default assume no pagination + let page = -1 + let limit = -1 + if (req.query.page) { + // if page is provided, make sure it's a positive number + page = parseInt(req.query.page as string) + if (page < 0) { + throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: page cannot be negative!`) + } + } + if (req.query.limit) { + // if limit is provided, make sure it's a positive number + limit = parseInt(req.query.limit as string) + if (limit < 0) { + throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: limit cannot be negative!`) + } + } + return { page, limit } +} diff --git a/packages/server/src/utils/prompt.ts b/packages/server/src/utils/prompt.ts index a2862042f..50b3f0b0c 100644 --- a/packages/server/src/utils/prompt.ts +++ b/packages/server/src/utils/prompt.ts @@ -6,6 +6,9 @@ export const ASSISTANT_PROMPT_GENERATOR = `Given a task description, produce a d # Guidelines +- Language: CRITICAL! You MUST respond in the EXACT SAME LANGUAGE as the input task description. + - (e.g., if the task is in Japanese, respond entirely in Japanese; if the task is in English, respond in English. If multiple languages are present, use the primary or dominant one.) + - DO NOT translate the task language into English. - Understand the Task: Grasp the main objective, goals, requirements, constraints, and expected output. - Minimal Changes: If an existing prompt is provided, improve it only if it's simple. For complex prompts, enhance clarity and add missing elements without altering the original structure. - Reasoning Before Conclusions**: Encourage reasoning steps before any conclusions are reached. ATTENTION! If the user provides examples where the reasoning happens afterward, REVERSE the order! 
NEVER START EXAMPLES WITH CONCLUSIONS! diff --git a/packages/server/src/utils/quotaUsage.ts b/packages/server/src/utils/quotaUsage.ts new file mode 100644 index 000000000..e2cf382d4 --- /dev/null +++ b/packages/server/src/utils/quotaUsage.ts @@ -0,0 +1,171 @@ +import { StatusCodes } from 'http-status-codes' +import { InternalFlowiseError } from '../errors/internalFlowiseError' +import { UsageCacheManager } from '../UsageCacheManager' +import { LICENSE_QUOTAS } from './constants' +import logger from './logger' + +type UsageType = 'flows' | 'users' +export const ENTERPRISE_FEATURE_FLAGS = [ + //'feat:account', // Only for Cloud + 'feat:datasets', + 'feat:evaluations', + 'feat:evaluators', + 'feat:files', + 'feat:login-activity', + 'feat:users', + 'feat:workspaces', + 'feat:logs', + 'feat:roles', + 'feat:sso-config' +] + +export const getCurrentUsage = async (orgId: string, subscriptionId: string, usageCacheManager: UsageCacheManager) => { + try { + if (!usageCacheManager || !subscriptionId || !orgId) return + + const currentStorageUsage = (await usageCacheManager.get(`storage:${orgId}`)) || 0 + const currentPredictionsUsage = (await usageCacheManager.get(`predictions:${orgId}`)) || 0 + + const quotas = await usageCacheManager.getQuotas(subscriptionId) + const storageLimit = quotas[LICENSE_QUOTAS.STORAGE_LIMIT] + const predLimit = quotas[LICENSE_QUOTAS.PREDICTIONS_LIMIT] + + return { + predictions: { + usage: currentPredictionsUsage, + limit: predLimit + }, + storage: { + usage: currentStorageUsage, + limit: storageLimit + } + } + } catch (error) { + logger.error(`[getCurrentUsage] Error getting usage: ${error}`) + throw error + } +} + +// For usage that doesn't renew per month, we just get the count from database and check +export const checkUsageLimit = async ( + type: UsageType, + subscriptionId: string, + usageCacheManager: UsageCacheManager, + currentUsage: number +) => { + if (!usageCacheManager || !subscriptionId) return + + const quotas = await 
usageCacheManager.getQuotas(subscriptionId) + + let limit = -1 + switch (type) { + case 'flows': + limit = quotas[LICENSE_QUOTAS.FLOWS_LIMIT] + break + case 'users': + limit = quotas[LICENSE_QUOTAS.USERS_LIMIT] + (Math.max(quotas[LICENSE_QUOTAS.ADDITIONAL_SEATS_LIMIT], 0) || 0) + break + } + + if (limit === -1) return + + if (currentUsage > limit) { + throw new InternalFlowiseError(StatusCodes.TOO_MANY_REQUESTS, `Limit exceeded: ${type}`) + } +} + +// As predictions limit renew per month, we set to cache with 1 month TTL +export const updatePredictionsUsage = async ( + orgId: string, + subscriptionId: string, + _: string = '', + usageCacheManager?: UsageCacheManager +) => { + if (!usageCacheManager) return + + const quotas = await usageCacheManager.getQuotas(subscriptionId) + const predictionsLimit = quotas[LICENSE_QUOTAS.PREDICTIONS_LIMIT] + + let currentPredictions = 0 + const existingPredictions = await usageCacheManager.get(`predictions:${orgId}`) + if (existingPredictions) { + currentPredictions = 1 + (existingPredictions as number) > predictionsLimit ? 
predictionsLimit : 1 + (existingPredictions as number) + } else { + currentPredictions = 1 + } + + const currentTTL = await usageCacheManager.getTTL(`predictions:${orgId}`) + if (currentTTL) { + const currentTimestamp = Date.now() + const timeLeft = currentTTL - currentTimestamp + usageCacheManager.set(`predictions:${orgId}`, currentPredictions, timeLeft) + } else { + const subscriptionDetails = await usageCacheManager.getSubscriptionDetails(subscriptionId) + if (subscriptionDetails && subscriptionDetails.created) { + const MS_PER_DAY = 24 * 60 * 60 * 1000 + const DAYS = 30 + const approximateMonthMs = DAYS * MS_PER_DAY + + // Calculate time elapsed since subscription creation + const createdTimestamp = subscriptionDetails.created * 1000 // Convert to milliseconds if timestamp is in seconds + const currentTimestamp = Date.now() + const timeElapsed = currentTimestamp - createdTimestamp + + // Calculate remaining time in the current month period + const timeLeft = approximateMonthMs - (timeElapsed % approximateMonthMs) + + usageCacheManager.set(`predictions:${orgId}`, currentPredictions, timeLeft) + } else { + // Fallback to default 30 days if no creation date + const MS_PER_DAY = 24 * 60 * 60 * 1000 + const DAYS = 30 + const approximateMonthMs = DAYS * MS_PER_DAY + usageCacheManager.set(`predictions:${orgId}`, currentPredictions, approximateMonthMs) + } + } +} + +export const checkPredictions = async (orgId: string, subscriptionId: string, usageCacheManager: UsageCacheManager) => { + if (!usageCacheManager || !subscriptionId) return + + const currentPredictions: number = (await usageCacheManager.get(`predictions:${orgId}`)) || 0 + + const quotas = await usageCacheManager.getQuotas(subscriptionId) + const predictionsLimit = quotas[LICENSE_QUOTAS.PREDICTIONS_LIMIT] + if (predictionsLimit === -1) return + + if (currentPredictions >= predictionsLimit) { + throw new InternalFlowiseError(StatusCodes.TOO_MANY_REQUESTS, 'Predictions limit exceeded') + } + + return { + 
usage: currentPredictions, + limit: predictionsLimit + } +} + +// Storage does not renew per month nor do we store the total size in database, so we just store the total size in cache +export const updateStorageUsage = (orgId: string, _: string = '', totalSize: number, usageCacheManager?: UsageCacheManager) => { + if (!usageCacheManager) return + usageCacheManager.set(`storage:${orgId}`, totalSize) +} + +export const checkStorage = async (orgId: string, subscriptionId: string, usageCacheManager: UsageCacheManager) => { + if (!usageCacheManager || !subscriptionId) return + + let currentStorageUsage = 0 + currentStorageUsage = (await usageCacheManager.get(`storage:${orgId}`)) || 0 + + const quotas = await usageCacheManager.getQuotas(subscriptionId) + const storageLimit = quotas[LICENSE_QUOTAS.STORAGE_LIMIT] + if (storageLimit === -1) return + + if (currentStorageUsage >= storageLimit) { + throw new InternalFlowiseError(StatusCodes.TOO_MANY_REQUESTS, 'Storage limit exceeded') + } + + return { + usage: currentStorageUsage, + limit: storageLimit + } +} diff --git a/packages/server/src/utils/sanitize.util.ts b/packages/server/src/utils/sanitize.util.ts new file mode 100644 index 000000000..9d84478f5 --- /dev/null +++ b/packages/server/src/utils/sanitize.util.ts @@ -0,0 +1,42 @@ +import { User } from '../enterprise/database/entities/user.entity' + +export function sanitizeNullBytes(obj: any): any { + const stack = [obj] + + while (stack.length) { + const current = stack.pop() + + if (Array.isArray(current)) { + for (let i = 0; i < current.length; i++) { + const val = current[i] + if (typeof val === 'string') { + // eslint-disable-next-line no-control-regex + current[i] = val.replace(/\u0000/g, '') + } else if (val && typeof val === 'object') { + stack.push(val) + } + } + } else if (current && typeof current === 'object') { + for (const key in current) { + if (!Object.hasOwnProperty.call(current, key)) continue + const val = current[key] + if (typeof val === 'string') { + 
// eslint-disable-next-line no-control-regex + current[key] = val.replace(/\u0000/g, '') + } else if (val && typeof val === 'object') { + stack.push(val) + } + } + } + } + + return obj +} + +export function sanitizeUser(user: Partial) { + delete user.credential + delete user.tempToken + delete user.tokenExpiry + + return user +} diff --git a/packages/server/src/utils/telemetry.ts b/packages/server/src/utils/telemetry.ts index cd26c8c93..99dc023c7 100644 --- a/packages/server/src/utils/telemetry.ts +++ b/packages/server/src/utils/telemetry.ts @@ -1,8 +1,11 @@ import { v4 as uuidv4 } from 'uuid' import { PostHog } from 'posthog-node' -import path from 'path' -import fs from 'fs' -import { getUserHome, getUserSettingsFilePath } from '.' +import { getAppVersion } from '../utils' + +export enum TelemetryEventType { + 'USER_CREATED' = 'user_created', + 'ORGANIZATION_CREATED' = 'organization_created' +} export class Telemetry { postHog?: PostHog @@ -15,27 +18,10 @@ export class Telemetry { } } - async id(): Promise { - try { - const settingsContent = await fs.promises.readFile(getUserSettingsFilePath(), 'utf8') - const settings = JSON.parse(settingsContent) - return settings.instanceId - } catch (error) { - const instanceId = uuidv4() - const settings = { - instanceId - } - const defaultLocation = process.env.SECRETKEY_PATH - ? 
path.join(process.env.SECRETKEY_PATH, 'settings.json') - : path.join(getUserHome(), '.flowise', 'settings.json') - await fs.promises.writeFile(defaultLocation, JSON.stringify(settings, null, 2)) - return instanceId - } - } - - async sendTelemetry(event: string, properties = {}): Promise { + async sendTelemetry(event: string, properties: Record = {}, orgId = ''): Promise { + properties.version = await getAppVersion() if (this.postHog) { - const distinctId = await this.id() + const distinctId = orgId || uuidv4() this.postHog.capture({ event, distinctId, diff --git a/packages/server/src/utils/upsertVector.ts b/packages/server/src/utils/upsertVector.ts index c60e5f374..7e705cf5b 100644 --- a/packages/server/src/utils/upsertVector.ts +++ b/packages/server/src/utils/upsertVector.ts @@ -21,18 +21,22 @@ import { getStartingNodes, getAPIOverrideConfig } from '../utils' -import { validateChatflowAPIKey } from './validateKey' +import { validateFlowAPIKey } from './validateKey' import { IncomingInput, INodeDirectedGraph, IReactFlowObject, ChatType, IExecuteFlowParams, MODE } from '../Interface' import { ChatFlow } from '../database/entities/ChatFlow' import { getRunningExpressApp } from '../utils/getRunningExpressApp' import { UpsertHistory } from '../database/entities/UpsertHistory' import { InternalFlowiseError } from '../errors/internalFlowiseError' import { StatusCodes } from 'http-status-codes' +import { checkStorage, updateStorageUsage } from './quotaUsage' import { getErrorMessage } from '../errors/utils' import { v4 as uuidv4 } from 'uuid' import { FLOWISE_COUNTER_STATUS, FLOWISE_METRIC_COUNTERS } from '../Interface.Metrics' import { Variable } from '../database/entities/Variable' +import { getWorkspaceSearchOptions } from '../enterprise/utils/ControllerServiceUtils' import { OMIT_QUEUE_JOB_DATA } from './constants' +import { Workspace } from '../enterprise/database/entities/workspace.entity' +import { Organization } from 
'../enterprise/database/entities/organization.entity' export const executeUpsert = async ({ componentNodes, @@ -43,7 +47,11 @@ export const executeUpsert = async ({ telemetry, cachePool, isInternal, - files + files, + orgId, + workspaceId, + subscriptionId, + usageCacheManager }: IExecuteFlowParams) => { const question = incomingInput.question let overrideConfig = incomingInput.overrideConfig ?? {} @@ -56,11 +64,21 @@ export const executeUpsert = async ({ if (files?.length) { overrideConfig = { ...incomingInput } for (const file of files) { + await checkStorage(orgId, subscriptionId, usageCacheManager) + const fileNames: string[] = [] const fileBuffer = await getFileFromUpload(file.path ?? file.key) // Address file name with special characters: https://github.com/expressjs/multer/issues/1104 file.originalname = Buffer.from(file.originalname, 'latin1').toString('utf8') - const storagePath = await addArrayFilesToStorage(file.mimetype, fileBuffer, file.originalname, fileNames, chatflowid) + const { path: storagePath, totalSize } = await addArrayFilesToStorage( + file.mimetype, + fileBuffer, + file.originalname, + fileNames, + orgId, + chatflowid + ) + await updateStorageUsage(orgId, workspaceId, totalSize, usageCacheManager) const fileInputFieldFromMimeType = mapMimeTypeToInputField(file.mimetype) @@ -147,7 +165,7 @@ export const executeUpsert = async ({ const { startingNodeIds, depthQueue } = getStartingNodes(filteredGraph, stopNodeId) /*** Get API Config ***/ - const availableVariables = await appDataSource.getRepository(Variable).find() + const availableVariables = await appDataSource.getRepository(Variable).findBy(getWorkspaceSearchOptions(chatflow.workspaceId)) const { nodeOverrides, variableOverrides, apiOverrideStatus } = getAPIOverrideConfig(chatflow) const upsertedResult = await buildFlow({ @@ -164,14 +182,20 @@ export const executeUpsert = async ({ sessionId, chatflowid, appDataSource, + usageCacheManager, + cachePool, + isUpsert, + stopNodeId, 
overrideConfig, apiOverrideStatus, nodeOverrides, availableVariables, variableOverrides, - cachePool, - isUpsert, - stopNodeId + orgId, + workspaceId, + subscriptionId, + updateStorageUsage, + checkStorage }) // Save to DB @@ -186,13 +210,17 @@ export const executeUpsert = async ({ await appDataSource.getRepository(UpsertHistory).save(upsertHistory) } - await telemetry.sendTelemetry('vector_upserted', { - version: await getAppVersion(), - chatlowId: chatflowid, - type: isInternal ? ChatType.INTERNAL : ChatType.EXTERNAL, - flowGraph: getTelemetryFlowObj(nodes, edges), - stopNodeId - }) + await telemetry.sendTelemetry( + 'vector_upserted', + { + version: await getAppVersion(), + chatlowId: chatflowid, + type: isInternal ? ChatType.INTERNAL : ChatType.EXTERNAL, + flowGraph: getTelemetryFlowObj(nodes, edges), + stopNodeId + }, + orgId + ) return upsertedResult['result'] ?? { result: 'Successfully Upserted' } } @@ -204,6 +232,7 @@ export const executeUpsert = async ({ */ export const upsertVector = async (req: Request, isInternal: boolean = false) => { const appServer = getRunningExpressApp() + try { const chatflowid = req.params.id @@ -222,12 +251,33 @@ export const upsertVector = async (req: Request, isInternal: boolean = false) => const files = (req.files as Express.Multer.File[]) || [] if (!isInternal) { - const isKeyValidated = await validateChatflowAPIKey(req, chatflow) + const isKeyValidated = await validateFlowAPIKey(req, chatflow) if (!isKeyValidated) { throw new InternalFlowiseError(StatusCodes.UNAUTHORIZED, `Unauthorized`) } } + // This can be public API, so we can only get orgId from the chatflow + const chatflowWorkspaceId = chatflow.workspaceId + const workspace = await appServer.AppDataSource.getRepository(Workspace).findOneBy({ + id: chatflowWorkspaceId + }) + if (!workspace) { + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Workspace ${chatflowWorkspaceId} not found`) + } + const workspaceId = workspace.id + + const org = await 
appServer.AppDataSource.getRepository(Organization).findOneBy({ + id: workspace.organizationId + }) + if (!org) { + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Organization ${workspace.organizationId} not found`) + } + + const orgId = org.id + const subscriptionId = org.subscriptionId as string + const productId = await appServer.identityManager.getProductIdFromSubscription(subscriptionId) + const executeData: IExecuteFlowParams = { componentNodes: appServer.nodesPool.componentNodes, incomingInput, @@ -237,17 +287,22 @@ export const upsertVector = async (req: Request, isInternal: boolean = false) => telemetry: appServer.telemetry, cachePool: appServer.cachePool, sseStreamer: appServer.sseStreamer, + usageCacheManager: appServer.usageCacheManager, baseURL, isInternal, files, - isUpsert: true + isUpsert: true, + orgId, + workspaceId, + subscriptionId, + productId } if (process.env.MODE === MODE.QUEUE) { const upsertQueue = appServer.queueManager.getQueue('upsert') const job = await upsertQueue.addJob(omit(executeData, OMIT_QUEUE_JOB_DATA)) - logger.debug(`[server]: Job added to queue: ${job.id}`) + logger.debug(`[server]: [${orgId}]: Job added to queue: ${job.id}`) const queueEvents = upsertQueue.getQueueEvents() const result = await job.waitUntilFinished(queueEvents) diff --git a/packages/server/src/utils/validateKey.ts b/packages/server/src/utils/validateKey.ts index 2eef55f2d..840735895 100644 --- a/packages/server/src/utils/validateKey.ts +++ b/packages/server/src/utils/validateKey.ts @@ -1,14 +1,15 @@ import { Request } from 'express' import { ChatFlow } from '../database/entities/ChatFlow' +import { ApiKey } from '../database/entities/ApiKey' import { compareKeys } from './apiKey' import apikeyService from '../services/apikey' /** - * Validate Chatflow API Key + * Validate flow API Key, this is needed because Prediction/Upsert API is public * @param {Request} req * @param {ChatFlow} chatflow */ -export const validateChatflowAPIKey = async (req: 
Request, chatflow: ChatFlow) => { +export const validateFlowAPIKey = async (req: Request, chatflow: ChatFlow): Promise => { const chatFlowApiKeyId = chatflow?.apikeyid if (!chatFlowApiKeyId) return true @@ -16,30 +17,52 @@ export const validateChatflowAPIKey = async (req: Request, chatflow: ChatFlow) = if (chatFlowApiKeyId && !authorizationHeader) return false const suppliedKey = authorizationHeader.split(`Bearer `).pop() - if (suppliedKey) { - const keys = await apikeyService.getAllApiKeys() - const apiSecret = keys.find((key: any) => key.id === chatFlowApiKeyId)?.apiSecret - if (!compareKeys(apiSecret, suppliedKey)) return false + if (!suppliedKey) return false + + try { + const apiKey = await apikeyService.getApiKeyById(chatFlowApiKeyId) + if (!apiKey) return false + + const apiKeyWorkSpaceId = apiKey.workspaceId + if (!apiKeyWorkSpaceId) return false + + if (apiKeyWorkSpaceId !== chatflow.workspaceId) return false + + const apiSecret = apiKey.apiSecret + if (!apiSecret || !compareKeys(apiSecret, suppliedKey)) return false + return true + } catch (error) { + return false } - return false } /** - * Validate API Key + * Validate and Get API Key Information * @param {Request} req + * @returns {Promise<{isValid: boolean, apiKey?: ApiKey, workspaceId?: string}>} */ -export const validateAPIKey = async (req: Request) => { +export const validateAPIKey = async (req: Request): Promise<{ isValid: boolean; apiKey?: ApiKey; workspaceId?: string }> => { const authorizationHeader = (req.headers['Authorization'] as string) ?? (req.headers['authorization'] as string) ?? 
'' - if (!authorizationHeader) return false + if (!authorizationHeader) return { isValid: false } const suppliedKey = authorizationHeader.split(`Bearer `).pop() - if (suppliedKey) { - const keys = await apikeyService.getAllApiKeys() - const apiSecret = keys.find((key: any) => key.apiKey === suppliedKey)?.apiSecret - if (!apiSecret) return false - if (!compareKeys(apiSecret, suppliedKey)) return false - return true + if (!suppliedKey) return { isValid: false } + + try { + const apiKey = await apikeyService.getApiKey(suppliedKey) + if (!apiKey) return { isValid: false } + + const apiKeyWorkSpaceId = apiKey.workspaceId + if (!apiKeyWorkSpaceId) return { isValid: false } + + const apiSecret = apiKey.apiSecret + if (!apiSecret || !compareKeys(apiSecret, suppliedKey)) { + return { isValid: false, apiKey, workspaceId: apiKey.workspaceId } + } + + return { isValid: true, apiKey, workspaceId: apiKey.workspaceId } + } catch (error) { + return { isValid: false } } - return false } diff --git a/packages/server/test/index.test.ts b/packages/server/test/index.test.ts new file mode 100644 index 000000000..8c038f44f --- /dev/null +++ b/packages/server/test/index.test.ts @@ -0,0 +1,28 @@ +import * as Server from '../src' +import { getRunningExpressApp } from '../src/utils/getRunningExpressApp' +import { organizationUserRouteTest } from './routes/v1/organization-user.route.test' +import { userRouteTest } from './routes/v1/user.route.test' +import { apiKeyTest } from './utils/api-key.util.test' + +// โฑ๏ธ Extend test timeout to 6 minutes for long setups (increase as tests grow) +jest.setTimeout(360000) + +beforeAll(async () => { + await Server.start() + + // โณ Wait 3 minutes for full server and database init (esp. 
on lower end hardware) + await new Promise((resolve) => setTimeout(resolve, 3 * 60 * 1000)) +}) + +afterAll(async () => { + await getRunningExpressApp().stopApp() +}) + +describe('Routes Test', () => { + userRouteTest() + organizationUserRouteTest() +}) + +describe('Utils Test', () => { + apiKeyTest() +}) diff --git a/packages/server/test/routes/v1/organization-user.route.test.ts b/packages/server/test/routes/v1/organization-user.route.test.ts new file mode 100644 index 000000000..d143c2e54 --- /dev/null +++ b/packages/server/test/routes/v1/organization-user.route.test.ts @@ -0,0 +1,39 @@ +import { StatusCodes } from 'http-status-codes' +import supertest from 'supertest' +import { getRunningExpressApp } from '../../../src/utils/getRunningExpressApp' + +export function organizationUserRouteTest() { + describe('Organization User Route', () => { + const route = '/api/v1/user' + + describe(`GET ${route}/test successful without user status`, () => { + const statusCode = StatusCodes.OK + const message = 'Hello World' + + it(`should return a ${statusCode} status and message of ${message}`, async () => { + await supertest(getRunningExpressApp().app) + .get(`${route + '/test'}`) + .expect(statusCode) + .then((response) => { + const body = response.body + expect(body.message).toEqual(message) + }) + }) + }) + + describe(`POST ${route}/test successful without user status`, () => { + const statusCode = StatusCodes.OK + const message = 'Hello World' + + it(`should return a ${statusCode} status and message of ${message}`, async () => { + await supertest(getRunningExpressApp().app) + .get(`${route + '/test'}`) + .expect(statusCode) + .then((response) => { + const body = response.body + expect(body.message).toEqual(message) + }) + }) + }) + }) +} diff --git a/packages/server/test/routes/v1/user.route.test.ts b/packages/server/test/routes/v1/user.route.test.ts new file mode 100644 index 000000000..cdab9d995 --- /dev/null +++ b/packages/server/test/routes/v1/user.route.test.ts @@ 
-0,0 +1,54 @@ +import { StatusCodes } from 'http-status-codes' +import supertest from 'supertest' +import { getRunningExpressApp } from '../../../src/utils/getRunningExpressApp' + +export function userRouteTest() { + describe('User Route', () => { + const route = '/api/v1/user' + + describe(`GET ${route}/test successful without user status`, () => { + const statusCode = StatusCodes.OK + const message = 'Hello World' + + it(`should return a ${statusCode} status and message of ${message}`, async () => { + await supertest(getRunningExpressApp().app) + .get(`${route + '/test'}`) + .expect(statusCode) + .then((response) => { + const body = response.body + expect(body.message).toEqual(message) + }) + }) + }) + + describe(`POST ${route}/test successful without user status`, () => { + const statusCode = StatusCodes.OK + const message = 'Hello World' + + it(`should return a ${statusCode} status and message of ${message}`, async () => { + await supertest(getRunningExpressApp().app) + .get(`${route + '/test'}`) + .expect(statusCode) + .then((response) => { + const body = response.body + expect(body.message).toEqual(message) + }) + }) + }) + + describe(`PUT ${route}/test successful without user status`, () => { + const statusCode = StatusCodes.OK + const message = 'Hello World' + + it(`should return a ${statusCode} status and message of ${message}`, async () => { + await supertest(getRunningExpressApp().app) + .get(`${route + '/test'}`) + .expect(statusCode) + .then((response) => { + const body = response.body + expect(body.message).toEqual(message) + }) + }) + }) + }) +} diff --git a/packages/server/test/utils/api-key.util.test.ts b/packages/server/test/utils/api-key.util.test.ts new file mode 100644 index 000000000..ccf1bfd5a --- /dev/null +++ b/packages/server/test/utils/api-key.util.test.ts @@ -0,0 +1,10 @@ +import { generateAPIKey } from '../../src/utils/apiKey' + +export function apiKeyTest() { + describe('Api Key', () => { + it('should be able to generate a new api 
key', () => { + const apiKey = generateAPIKey() + expect(typeof apiKey === 'string').toEqual(true) + }) + }) +} diff --git a/packages/server/test/utils/validateKey.test.ts b/packages/server/test/utils/validateKey.test.ts deleted file mode 100644 index cf3552c1e..000000000 --- a/packages/server/test/utils/validateKey.test.ts +++ /dev/null @@ -1,41 +0,0 @@ -import { Request } from 'express' -import { ChatFlow } from '../../src/database/entities/ChatFlow' -import { validateChatflowAPIKey } from '../../src/utils/validateKey' -import { compareKeys, getAPIKeys } from '../../src/utils/apiKey' - -jest.mock('../../src/utils/apiKey') - -describe('validateChatflowAPIKey', () => { - let req: Partial - let chatflow: ChatFlow - - beforeEach(() => { - req = { - headers: {} - } - chatflow = { - apikeyid: null - } as ChatFlow - }) - - it('should return true if chatflow.apikeyid is not set', async () => { - const result = await validateChatflowAPIKey(req as Request, chatflow) - expect(result).toBe(true) - }) - - it('should return false if chatflow.apikeyid is set but authorization header is missing', async () => { - chatflow.apikeyid = 'some-api-key-id' - const result = await validateChatflowAPIKey(req as Request, chatflow) - expect(result).toBe(false) - }) - - it('should return false if supplied key does not match the expected key', async () => { - chatflow.apikeyid = 'some-api-key-id' - req.headers['authorization'] = 'Bearer invalid-key' - ;(getAPIKeys as jest.Mock).mockResolvedValue([{ id: 'some-api-key-id', apiSecret: 'expected-secret-key' }]) - ;(compareKeys as jest.Mock).mockImplementation((expected, supplied) => expected === supplied) - - const result = await validateChatflowAPIKey(req as Request, chatflow) - expect(result).toBe(false) - }) -}) diff --git a/packages/server/tsconfig.json b/packages/server/tsconfig.json index c92c623cd..fa2e8b56f 100644 --- a/packages/server/tsconfig.json +++ b/packages/server/tsconfig.json @@ -15,5 +15,5 @@ "declaration": true }, "include": 
["src/**/*.ts"], - "exclude": ["node_modules", "**/*.test.ts"] + "exclude": ["node_modules"] } diff --git a/packages/ui/index.html b/packages/ui/index.html index 1992cea06..9fcbcb60a 100644 --- a/packages/ui/index.html +++ b/packages/ui/index.html @@ -12,6 +12,8 @@ name="description" content="Open source generative AI development platform for building AI agents, LLM orchestration, and more" /> + + @@ -39,6 +41,17 @@ href="https://fonts.googleapis.com/css2?family=Inter:wght@400;500;600;700&family=Poppins:wght@400;500;600;700&family=Roboto:wght@400;500;700&display=swap" rel="stylesheet" /> + + diff --git a/packages/ui/package.json b/packages/ui/package.json index a29bccc55..75e532fcd 100644 --- a/packages/ui/package.json +++ b/packages/ui/package.json @@ -1,6 +1,6 @@ { "name": "flowise-ui", - "version": "3.0.0", + "version": "3.0.11", "license": "SEE LICENSE IN LICENSE.md", "homepage": "https://flowiseai.com", "author": { @@ -10,7 +10,8 @@ "dependencies": { "@codemirror/lang-javascript": "^6.2.1", "@codemirror/lang-json": "^6.0.1", - "@codemirror/view": "^6.22.3", + "@codemirror/lang-markdown": "^6.2.5", + "@codemirror/view": "^6.26.3", "@emotion/cache": "^11.4.0", "@emotion/react": "^11.10.6", "@emotion/styled": "^11.10.6", @@ -23,7 +24,9 @@ "@mui/system": "^6.4.3", "@mui/x-data-grid": "6.8.0", "@mui/x-tree-view": "^7.25.0", + "@reduxjs/toolkit": "^2.2.7", "@tabler/icons-react": "^3.30.0", + "@tiptap/extension-code-block-lowlight": "^3.4.3", "@tiptap/extension-mention": "^2.11.5", "@tiptap/extension-placeholder": "^2.11.5", "@tiptap/pm": "^2.11.5", @@ -32,8 +35,9 @@ "@uiw/codemirror-theme-sublime": "^4.21.21", "@uiw/codemirror-theme-vscode": "^4.21.21", "@uiw/react-codemirror": "^4.21.21", - "axios": "1.7.9", + "axios": "1.12.0", "clsx": "^1.1.1", + "dompurify": "^3.2.6", "dotenv": "^16.0.0", "flowise-embed": "latest", "flowise-embed-react": "latest", @@ -43,6 +47,7 @@ "history": "^5.0.0", "html-react-parser": "^3.0.4", "lodash": "^4.17.21", + "lowlight": "^3.3.0", 
"moment": "^2.29.3", "notistack": "^2.0.4", "prop-types": "^15.7.2", @@ -60,11 +65,13 @@ "react-router-dom": "~6.3.0", "react-syntax-highlighter": "^15.5.0", "reactflow": "^11.5.6", + "recharts": "^2.12.6", "redux": "^4.0.5", "rehype-mathjax": "^4.0.2", "rehype-raw": "^7.0.0", "remark-gfm": "^3.0.1", "remark-math": "^5.1.1", + "showdown": "^2.1.0", "tippy.js": "^6.3.7", "uuid": "^9.0.1", "yup": "^0.32.9" diff --git a/packages/ui/public/logo192.png b/packages/ui/public/logo192.png new file mode 100644 index 000000000..8ad932c48 Binary files /dev/null and b/packages/ui/public/logo192.png differ diff --git a/packages/ui/public/logo512.png b/packages/ui/public/logo512.png new file mode 100644 index 000000000..9132ee39a Binary files /dev/null and b/packages/ui/public/logo512.png differ diff --git a/packages/ui/public/manifest.json b/packages/ui/public/manifest.json new file mode 100644 index 000000000..3cd0187f0 --- /dev/null +++ b/packages/ui/public/manifest.json @@ -0,0 +1,30 @@ +{ + "short_name": "Flowise", + "name": "Flowise App", + "icons": [ + { + "src": "favicon.ico", + "sizes": "64x64 32x32 24x24 16x16", + "type": "image/x-icon" + }, + { + "src": "logo192.png", + "type": "image/png", + "sizes": "192x192", + "purpose": "any maskable" + }, + { + "src": "logo512.png", + "type": "image/png", + "sizes": "512x512", + "purpose": "any maskable" + } + ], + "start_url": ".", + "display": "standalone", + "theme_color": "#000000", + "background_color": "#ffffff", + "orientation": "any", + "scope": "/", + "prefer_related_applications": false +} diff --git a/packages/ui/src/ErrorBoundary.jsx b/packages/ui/src/ErrorBoundary.jsx index 9745013fa..25e955508 100644 --- a/packages/ui/src/ErrorBoundary.jsx +++ b/packages/ui/src/ErrorBoundary.jsx @@ -30,7 +30,7 @@ const ErrorBoundary = ({ error }) => {
                             {`Status: ${error.response.status}`}
                             
- {error.response.data.message} + {error.response?.data?.message}
diff --git a/packages/ui/src/api/account.api.js b/packages/ui/src/api/account.api.js new file mode 100644 index 000000000..2f6f62066 --- /dev/null +++ b/packages/ui/src/api/account.api.js @@ -0,0 +1,25 @@ +import client from '@/api/client' + +const inviteAccount = (body) => client.post(`/account/invite`, body) +const registerAccount = (body) => client.post(`/account/register`, body) +const verifyAccountEmail = (body) => client.post('/account/verify', body) +const resendVerificationEmail = (body) => client.post('/account/resend-verification', body) +const forgotPassword = (body) => client.post('/account/forgot-password', body) +const resetPassword = (body) => client.post('/account/reset-password', body) +const getBillingData = () => client.post('/account/billing') +const logout = () => client.post('/account/logout') +const getBasicAuth = () => client.get('/account/basic-auth') +const checkBasicAuth = (body) => client.post('/account/basic-auth', body) + +export default { + getBillingData, + inviteAccount, + registerAccount, + verifyAccountEmail, + resendVerificationEmail, + forgotPassword, + resetPassword, + logout, + getBasicAuth, + checkBasicAuth +} diff --git a/packages/ui/src/api/apikey.js b/packages/ui/src/api/apikey.js index ca554d574..a8483e43d 100644 --- a/packages/ui/src/api/apikey.js +++ b/packages/ui/src/api/apikey.js @@ -1,6 +1,6 @@ import client from './client' -const getAllAPIKeys = () => client.get('/apikey') +const getAllAPIKeys = (params) => client.get('/apikey', { params }) const createNewAPI = (body) => client.post(`/apikey`, body) diff --git a/packages/ui/src/api/audit.js b/packages/ui/src/api/audit.js new file mode 100644 index 000000000..197180a3d --- /dev/null +++ b/packages/ui/src/api/audit.js @@ -0,0 +1,9 @@ +import client from './client' + +const fetchLoginActivity = (body) => client.post(`/audit/login-activity`, body) +const deleteLoginActivity = (body) => client.post(`/audit/login-activity/delete`, body) + +export default { + 
fetchLoginActivity, + deleteLoginActivity +} diff --git a/packages/ui/src/api/auth.js b/packages/ui/src/api/auth.js new file mode 100644 index 000000000..50cac09a7 --- /dev/null +++ b/packages/ui/src/api/auth.js @@ -0,0 +1,16 @@ +import client from './client' + +// auth +const resolveLogin = (body) => client.post(`/auth/resolve`, body) +const login = (body) => client.post(`/auth/login`, body) + +// permissions +const getAllPermissions = () => client.get(`/auth/permissions`) +const ssoSuccess = (token) => client.get(`/auth/sso-success?token=${token}`) + +export default { + resolveLogin, + login, + getAllPermissions, + ssoSuccess +} diff --git a/packages/ui/src/api/chatflows.js b/packages/ui/src/api/chatflows.js index 8bd47f4f7..a5d4f323a 100644 --- a/packages/ui/src/api/chatflows.js +++ b/packages/ui/src/api/chatflows.js @@ -1,8 +1,8 @@ import client from './client' -const getAllChatflows = () => client.get('/chatflows?type=CHATFLOW') +const getAllChatflows = (params) => client.get('/chatflows?type=CHATFLOW', { params }) -const getAllAgentflows = (type) => client.get(`/chatflows?type=${type}`) +const getAllAgentflows = (type, params) => client.get(`/chatflows?type=${type}`, { params }) const getSpecificChatflow = (id) => client.get(`/chatflows/${id}`) @@ -10,8 +10,6 @@ const getSpecificChatflowFromPublicEndpoint = (id) => client.get(`/public-chatfl const createNewChatflow = (body) => client.post(`/chatflows`, body) -const importChatflows = (body) => client.post(`/chatflows/importchatflows`, body) - const updateChatflow = (id, body) => client.put(`/chatflows/${id}`, body) const deleteChatflow = (id) => client.delete(`/chatflows/${id}`) @@ -20,6 +18,8 @@ const getIsChatflowStreaming = (id) => client.get(`/chatflows-streaming/${id}`) const getAllowChatflowUploads = (id) => client.get(`/chatflows-uploads/${id}`) +const getHasChatflowChanged = (id, lastUpdatedDateTime) => client.get(`/chatflows/has-changed/${id}/${lastUpdatedDateTime}`) + const generateAgentflow = (body) 
=> client.post(`/agentflowv2-generator/generate`, body) export default { @@ -28,10 +28,10 @@ export default { getSpecificChatflow, getSpecificChatflowFromPublicEndpoint, createNewChatflow, - importChatflows, updateChatflow, deleteChatflow, getIsChatflowStreaming, getAllowChatflowUploads, + getHasChatflowChanged, generateAgentflow } diff --git a/packages/ui/src/api/client.js b/packages/ui/src/api/client.js index d2dd87333..205e49155 100644 --- a/packages/ui/src/api/client.js +++ b/packages/ui/src/api/client.js @@ -1,26 +1,39 @@ import axios from 'axios' -import { baseURL } from '@/store/constant' +import { baseURL, ErrorMessage } from '@/store/constant' +import AuthUtils from '@/utils/authUtils' const apiClient = axios.create({ baseURL: `${baseURL}/api/v1`, headers: { 'Content-type': 'application/json', 'x-request-from': 'internal' - } + }, + withCredentials: true }) -apiClient.interceptors.request.use(function (config) { - const username = localStorage.getItem('username') - const password = localStorage.getItem('password') - - if (username && password) { - config.auth = { - username, - password +apiClient.interceptors.response.use( + function (response) { + return response + }, + async (error) => { + if (error.response.status === 401) { + // check if refresh is needed + if (error.response.data.message === ErrorMessage.TOKEN_EXPIRED && error.response.data.retry === true) { + const originalRequest = error.config + // call api to get new token + const response = await axios.post(`${baseURL}/api/v1/auth/refreshToken`, {}, { withCredentials: true }) + if (response.data.id) { + // retry the original request + return apiClient.request(originalRequest) + } + } + localStorage.removeItem('username') + localStorage.removeItem('password') + AuthUtils.removeCurrentUser() } - } - return config -}) + return Promise.reject(error) + } +) export default apiClient diff --git a/packages/ui/src/api/dataset.js b/packages/ui/src/api/dataset.js new file mode 100644 index 
000000000..125dc035c --- /dev/null +++ b/packages/ui/src/api/dataset.js @@ -0,0 +1,30 @@ +import client from './client' + +const getAllDatasets = (params) => client.get('/datasets', { params }) + +//dataset +const getDataset = (id, params) => client.get(`/datasets/set/${id}`, { params }) +const createDataset = (body) => client.post(`/datasets/set`, body) +const updateDataset = (id, body) => client.put(`/datasets/set/${id}`, body) +const deleteDataset = (id) => client.delete(`/datasets/set/${id}`) + +//rows +const createDatasetRow = (body) => client.post(`/datasets/rows`, body) +const updateDatasetRow = (id, body) => client.put(`/datasets/rows/${id}`, body) +const deleteDatasetRow = (id) => client.delete(`/datasets/rows/${id}`) +const deleteDatasetItems = (ids) => client.patch(`/datasets/rows`, { ids }) + +const reorderDatasetRow = (body) => client.post(`/datasets/reorder`, body) + +export default { + getAllDatasets, + getDataset, + createDataset, + updateDataset, + deleteDataset, + createDatasetRow, + updateDatasetRow, + deleteDatasetRow, + deleteDatasetItems, + reorderDatasetRow +} diff --git a/packages/ui/src/api/documentstore.js b/packages/ui/src/api/documentstore.js index cb6211b97..ac0c4bbff 100644 --- a/packages/ui/src/api/documentstore.js +++ b/packages/ui/src/api/documentstore.js @@ -1,6 +1,6 @@ import client from './client' -const getAllDocumentStores = () => client.get('/document-store/store') +const getAllDocumentStores = (params) => client.get('/document-store/store', { params }) const getDocumentLoaders = () => client.get('/document-store/components/loaders') const getSpecificDocumentStore = (id) => client.get(`/document-store/store/${id}`) const createDocumentStore = (body) => client.post(`/document-store/store`, body) diff --git a/packages/ui/src/api/evaluations.js b/packages/ui/src/api/evaluations.js new file mode 100644 index 000000000..8a1f0cbf8 --- /dev/null +++ b/packages/ui/src/api/evaluations.js @@ -0,0 +1,22 @@ +import client from './client' 
+ +//evaluation +const getAllEvaluations = (params) => client.get('/evaluations', { params }) +const getIsOutdated = (id) => client.get(`/evaluations/is-outdated/${id}`) +const getEvaluation = (id) => client.get(`/evaluations/${id}`) +const createEvaluation = (body) => client.post(`/evaluations`, body) +const deleteEvaluation = (id) => client.delete(`/evaluations/${id}`) +const runAgain = (id) => client.post(`/evaluations/run-again/${id}`) +const getVersions = (id) => client.get(`/evaluations/versions/${id}`) +const deleteEvaluations = (ids, isDeleteAllVersion) => client.patch(`/evaluations`, { ids, isDeleteAllVersion }) + +export default { + createEvaluation, + deleteEvaluation, + getAllEvaluations, + getEvaluation, + getIsOutdated, + runAgain, + getVersions, + deleteEvaluations +} diff --git a/packages/ui/src/api/evaluators.js b/packages/ui/src/api/evaluators.js new file mode 100644 index 000000000..f496b3921 --- /dev/null +++ b/packages/ui/src/api/evaluators.js @@ -0,0 +1,17 @@ +import client from './client' + +const getAllEvaluators = (params) => client.get('/evaluators', { params }) + +//evaluators +const createEvaluator = (body) => client.post(`/evaluators`, body) +const getEvaluator = (id) => client.get(`/evaluators/${id}`) +const updateEvaluator = (id, body) => client.put(`/evaluators/${id}`, body) +const deleteEvaluator = (id) => client.delete(`/evaluators/${id}`) + +export default { + getAllEvaluators, + createEvaluator, + getEvaluator, + updateEvaluator, + deleteEvaluator +} diff --git a/packages/ui/src/api/files.js b/packages/ui/src/api/files.js new file mode 100644 index 000000000..3a082afc5 --- /dev/null +++ b/packages/ui/src/api/files.js @@ -0,0 +1,10 @@ +import client from './client' + +const getAllFiles = () => client.get('/files') + +const deleteFile = (path) => client.delete(`/files`, { params: { path } }) + +export default { + getAllFiles, + deleteFile +} diff --git a/packages/ui/src/api/log.js b/packages/ui/src/api/log.js new file mode 100644 
index 000000000..2b3130096 --- /dev/null +++ b/packages/ui/src/api/log.js @@ -0,0 +1,7 @@ +import client from './client' + +const getLogs = (startDate, endDate) => client.get(`/logs?startDate=${startDate}&endDate=${endDate}`) + +export default { + getLogs +} diff --git a/packages/ui/src/api/loginmethod.js b/packages/ui/src/api/loginmethod.js new file mode 100644 index 000000000..2c6928d42 --- /dev/null +++ b/packages/ui/src/api/loginmethod.js @@ -0,0 +1,16 @@ +import client from '@/api/client' + +// TODO: use this endpoint but without the org id because org id will be null +const getLoginMethods = (organizationId) => client.get(`/loginmethod?organizationId=${organizationId}`) +// TODO: don't use this endpoint. +const getDefaultLoginMethods = () => client.get(`/loginmethod/default`) +const updateLoginMethods = (body) => client.put(`/loginmethod`, body) + +const testLoginMethod = (body) => client.post(`/loginmethod/test`, body) + +export default { + getLoginMethods, + updateLoginMethods, + testLoginMethod, + getDefaultLoginMethods +} diff --git a/packages/ui/src/api/nodes.js b/packages/ui/src/api/nodes.js index d0ef3a74a..f9eff943d 100644 --- a/packages/ui/src/api/nodes.js +++ b/packages/ui/src/api/nodes.js @@ -7,9 +7,12 @@ const getNodesByCategory = (name) => client.get(`/nodes/category/${name}`) const executeCustomFunctionNode = (body) => client.post(`/node-custom-function`, body) +const executeNodeLoadMethod = (name, body) => client.post(`/node-load-method/${name}`, body) + export default { getAllNodes, getSpecificNode, executeCustomFunctionNode, - getNodesByCategory + getNodesByCategory, + executeNodeLoadMethod } diff --git a/packages/ui/src/api/oauth2.js b/packages/ui/src/api/oauth2.js new file mode 100644 index 000000000..6546a504b --- /dev/null +++ b/packages/ui/src/api/oauth2.js @@ -0,0 +1,13 @@ +import client from './client' + +const authorize = (credentialId) => client.post(`/oauth2-credential/authorize/${credentialId}`) + +const refresh = (credentialId) => 
client.post(`/oauth2-credential/refresh/${credentialId}`) + +const getCallback = (queryParams) => client.get(`/oauth2-credential/callback?${queryParams}`) + +export default { + authorize, + refresh, + getCallback +} diff --git a/packages/ui/src/api/platformsettings.js b/packages/ui/src/api/platformsettings.js new file mode 100644 index 000000000..4f5278a1d --- /dev/null +++ b/packages/ui/src/api/platformsettings.js @@ -0,0 +1,7 @@ +import client from './client' + +const getSettings = () => client.get('/settings') + +export default { + getSettings +} diff --git a/packages/ui/src/api/pricing.js b/packages/ui/src/api/pricing.js new file mode 100644 index 000000000..efd5b4751 --- /dev/null +++ b/packages/ui/src/api/pricing.js @@ -0,0 +1,7 @@ +import client from '@/api/client' + +const getPricingPlans = (body) => client.get(`/pricing`, body) + +export default { + getPricingPlans +} diff --git a/packages/ui/src/api/role.js b/packages/ui/src/api/role.js new file mode 100644 index 000000000..632e13a64 --- /dev/null +++ b/packages/ui/src/api/role.js @@ -0,0 +1,17 @@ +import client from './client' + +const getAllRolesByOrganizationId = (organizationId) => client.get(`/role?organizationId=${organizationId}`) +const getRoleById = (id) => client.get(`/auth/roles/${id}`) +const createRole = (body) => client.post(`/role`, body) +const updateRole = (body) => client.put(`/role`, body) +const getRoleByName = (name) => client.get(`/auth/roles/name/${name}`) +const deleteRole = (id, organizationId) => client.delete(`/role?id=${id}&organizationId=${organizationId}`) + +export default { + getAllRolesByOrganizationId, + getRoleById, + createRole, + updateRole, + getRoleByName, + deleteRole +} diff --git a/packages/ui/src/api/sso.js b/packages/ui/src/api/sso.js new file mode 100644 index 000000000..81855fc00 --- /dev/null +++ b/packages/ui/src/api/sso.js @@ -0,0 +1,7 @@ +import client from './client' + +const ssoLogin = (providerName) => client.get(`/${providerName}/login`) + +export 
default { + ssoLogin +} diff --git a/packages/ui/src/api/tools.js b/packages/ui/src/api/tools.js index 77992a2ab..5e9b7b559 100644 --- a/packages/ui/src/api/tools.js +++ b/packages/ui/src/api/tools.js @@ -1,6 +1,6 @@ import client from './client' -const getAllTools = () => client.get('/tools') +const getAllTools = (params) => client.get('/tools', { params }) const getSpecificTool = (id) => client.get(`/tools/${id}`) diff --git a/packages/ui/src/api/tts.js b/packages/ui/src/api/tts.js new file mode 100644 index 000000000..bf528cd8a --- /dev/null +++ b/packages/ui/src/api/tts.js @@ -0,0 +1,16 @@ +import client from './client' + +const abortTTS = (body) => client.post('/text-to-speech/abort', body) + +const generateVoice = (body) => + client.post('/text-to-speech/generate', body, { + responseType: 'arraybuffer' + }) + +const listVoices = (params) => client.get('/text-to-speech/voices', { params }) + +export default { + abortTTS, + generateVoice, + listVoices +} diff --git a/packages/ui/src/api/user.js b/packages/ui/src/api/user.js new file mode 100644 index 000000000..86165ec9c --- /dev/null +++ b/packages/ui/src/api/user.js @@ -0,0 +1,59 @@ +import client from './client' + +// users +const getUserById = (id) => client.get(`/user?id=${id}`) +const updateUser = (body) => client.put(`/user`, body) + +// organization users +const getAllUsersByOrganizationId = (organizationId) => client.get(`/organizationuser?organizationId=${organizationId}`) +const getUserByUserIdOrganizationId = (organizationId, userId) => + client.get(`/organizationuser?organizationId=${organizationId}&userId=${userId}`) +const getOrganizationsByUserId = (userId) => client.get(`/organizationuser?userId=${userId}`) +const updateOrganizationUser = (body) => client.put(`/organizationuser`, body) +const deleteOrganizationUser = (organizationId, userId) => + client.delete(`/organizationuser?organizationId=${organizationId}&userId=${userId}`) + +const getAdditionalSeatsQuantity = (subscriptionId) => + 
client.get(`/organization/additional-seats-quantity?subscriptionId=${subscriptionId}`) +const getCustomerDefaultSource = (customerId) => client.get(`/organization/customer-default-source?customerId=${customerId}`) +const getAdditionalSeatsProration = (subscriptionId, quantity) => + client.get(`/organization/additional-seats-proration?subscriptionId=${subscriptionId}&quantity=${quantity}`) +const updateAdditionalSeats = (subscriptionId, quantity, prorationDate) => + client.post(`/organization/update-additional-seats`, { subscriptionId, quantity, prorationDate }) +const getPlanProration = (subscriptionId, newPlanId) => + client.get(`/organization/plan-proration?subscriptionId=${subscriptionId}&newPlanId=${newPlanId}`) +const updateSubscriptionPlan = (subscriptionId, newPlanId, prorationDate) => + client.post(`/organization/update-subscription-plan`, { subscriptionId, newPlanId, prorationDate }) +const getCurrentUsage = () => client.get(`/organization/get-current-usage`) + +// workspace users +const getAllUsersByWorkspaceId = (workspaceId) => client.get(`/workspaceuser?workspaceId=${workspaceId}`) +const getUserByRoleId = (roleId) => client.get(`/workspaceuser?roleId=${roleId}`) +const getUserByUserIdWorkspaceId = (userId, workspaceId) => client.get(`/workspaceuser?userId=${userId}&workspaceId=${workspaceId}`) +const getWorkspacesByUserId = (userId) => client.get(`/workspaceuser?userId=${userId}`) +const getWorkspacesByOrganizationIdUserId = (organizationId, userId) => + client.get(`/workspaceuser?organizationId=${organizationId}&userId=${userId}`) +const deleteWorkspaceUser = (workspaceId, userId) => client.delete(`/workspaceuser?workspaceId=${workspaceId}&userId=${userId}`) + +export default { + getUserById, + updateUser, + getAllUsersByOrganizationId, + getUserByUserIdOrganizationId, + getOrganizationsByUserId, + getAllUsersByWorkspaceId, + getUserByRoleId, + getUserByUserIdWorkspaceId, + getWorkspacesByUserId, + getWorkspacesByOrganizationIdUserId, + 
updateOrganizationUser, + deleteWorkspaceUser, + getAdditionalSeatsQuantity, + getCustomerDefaultSource, + getAdditionalSeatsProration, + updateAdditionalSeats, + getPlanProration, + updateSubscriptionPlan, + getCurrentUsage, + deleteOrganizationUser +} diff --git a/packages/ui/src/api/variables.js b/packages/ui/src/api/variables.js index 944b83198..4285f184c 100644 --- a/packages/ui/src/api/variables.js +++ b/packages/ui/src/api/variables.js @@ -1,6 +1,6 @@ import client from './client' -const getAllVariables = () => client.get('/variables') +const getAllVariables = (params) => client.get('/variables', { params }) const createVariable = (body) => client.post(`/variables`, body) diff --git a/packages/ui/src/api/workspace.js b/packages/ui/src/api/workspace.js new file mode 100644 index 000000000..2c771fe08 --- /dev/null +++ b/packages/ui/src/api/workspace.js @@ -0,0 +1,34 @@ +import client from './client' + +const getAllWorkspacesByOrganizationId = (organizationId) => client.get(`/workspace?organizationId=${organizationId}`) + +const getWorkspaceById = (id) => client.get(`/workspace?id=${id}`) + +const unlinkUsers = (id, body) => client.post(`/workspace/unlink-users/${id}`, body) +const linkUsers = (id, body) => client.post(`/workspace/link-users/${id}`, body) + +const switchWorkspace = (id) => client.post(`/workspace/switch?id=${id}`) + +const createWorkspace = (body) => client.post(`/workspace`, body) +const updateWorkspace = (body) => client.put(`/workspace`, body) +const deleteWorkspace = (id) => client.delete(`/workspace/${id}`) + +const getSharedWorkspacesForItem = (id) => client.get(`/workspace/shared/${id}`) +const setSharedWorkspacesForItem = (id, body) => client.post(`/workspace/shared/${id}`, body) + +const updateWorkspaceUserRole = (body) => client.put(`/workspaceuser`, body) + +export default { + getAllWorkspacesByOrganizationId, + getWorkspaceById, + createWorkspace, + updateWorkspace, + deleteWorkspace, + unlinkUsers, + linkUsers, + switchWorkspace, + 
getSharedWorkspacesForItem, + setSharedWorkspacesForItem, + + updateWorkspaceUserRole +} diff --git a/packages/ui/src/assets/images/auth0.svg b/packages/ui/src/assets/images/auth0.svg new file mode 100644 index 000000000..e02b0da74 --- /dev/null +++ b/packages/ui/src/assets/images/auth0.svg @@ -0,0 +1,2 @@ + + \ No newline at end of file diff --git a/packages/ui/src/assets/images/contact_support.svg b/packages/ui/src/assets/images/contact_support.svg new file mode 100644 index 000000000..c429f0504 --- /dev/null +++ b/packages/ui/src/assets/images/contact_support.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/packages/ui/src/assets/images/elevenlabs.svg b/packages/ui/src/assets/images/elevenlabs.svg new file mode 100644 index 000000000..5e3632613 --- /dev/null +++ b/packages/ui/src/assets/images/elevenlabs.svg @@ -0,0 +1,7 @@ + + + + + + + diff --git a/packages/ui/src/assets/images/empty_datasets.svg b/packages/ui/src/assets/images/empty_datasets.svg new file mode 100644 index 000000000..907b4545e --- /dev/null +++ b/packages/ui/src/assets/images/empty_datasets.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/packages/ui/src/assets/images/empty_evals.svg b/packages/ui/src/assets/images/empty_evals.svg new file mode 100644 index 000000000..cb879aace --- /dev/null +++ b/packages/ui/src/assets/images/empty_evals.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/packages/ui/src/assets/images/empty_evaluators.svg b/packages/ui/src/assets/images/empty_evaluators.svg new file mode 100644 index 000000000..222d0f811 --- /dev/null +++ b/packages/ui/src/assets/images/empty_evaluators.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/packages/ui/src/assets/images/gemini.png b/packages/ui/src/assets/images/gemini.png new file mode 100644 index 000000000..b569dc9a7 Binary files /dev/null and b/packages/ui/src/assets/images/gemini.png differ diff --git a/packages/ui/src/assets/images/github.svg 
b/packages/ui/src/assets/images/github.svg new file mode 100644 index 000000000..9c6b13d67 --- /dev/null +++ b/packages/ui/src/assets/images/github.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/packages/ui/src/assets/images/google.svg b/packages/ui/src/assets/images/google.svg new file mode 100644 index 000000000..088288fa3 --- /dev/null +++ b/packages/ui/src/assets/images/google.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/packages/ui/src/assets/images/logs_empty.svg b/packages/ui/src/assets/images/logs_empty.svg new file mode 100644 index 000000000..61df7e32e --- /dev/null +++ b/packages/ui/src/assets/images/logs_empty.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/packages/ui/src/assets/images/microsoft-azure.svg b/packages/ui/src/assets/images/microsoft-azure.svg new file mode 100644 index 000000000..1f7397648 --- /dev/null +++ b/packages/ui/src/assets/images/microsoft-azure.svg @@ -0,0 +1 @@ +MS-SymbolLockup \ No newline at end of file diff --git a/packages/ui/src/assets/images/roles_empty.svg b/packages/ui/src/assets/images/roles_empty.svg new file mode 100644 index 000000000..154e89dca --- /dev/null +++ b/packages/ui/src/assets/images/roles_empty.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/packages/ui/src/assets/images/unauthorized.svg b/packages/ui/src/assets/images/unauthorized.svg new file mode 100644 index 000000000..0b3db213b --- /dev/null +++ b/packages/ui/src/assets/images/unauthorized.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/packages/ui/src/assets/images/users_empty.svg b/packages/ui/src/assets/images/users_empty.svg new file mode 100644 index 000000000..4b6fb6d97 --- /dev/null +++ b/packages/ui/src/assets/images/users_empty.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/packages/ui/src/assets/images/workspaces_empty.svg b/packages/ui/src/assets/images/workspaces_empty.svg new file mode 100644 index 000000000..43ae0cd00 --- /dev/null +++ 
b/packages/ui/src/assets/images/workspaces_empty.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/packages/ui/src/assets/scss/style.scss b/packages/ui/src/assets/scss/style.scss index 52e700403..bda0dbb7b 100644 --- a/packages/ui/src/assets/scss/style.scss +++ b/packages/ui/src/assets/scss/style.scss @@ -132,6 +132,80 @@ content: '\200B'; } } + + pre { + background: var(--code-bg, #2d2d2d) !important; + border-radius: 0.5rem; + color: var(--code-color, #d4d4d4) !important; + font-family: 'JetBrainsMono', 'Fira Code', 'Monaco', 'Cascadia Code', 'Roboto Mono', monospace; + margin: 1.5rem 0; + padding: 0.75rem 1rem; + + code { + background: none !important; + color: inherit !important; + font-size: 0.8rem; + padding: 0; + } + + /* Syntax highlighting matching the screenshot colors */ + .hljs-comment, + .hljs-quote { + color: var(--hljs-comment, #6a9955) !important; + } + + .hljs-variable, + .hljs-name { + color: var(--hljs-variable, #9cdcfe) !important; /* Light blue for variables */ + } + + .hljs-number, + .hljs-literal { + color: var(--hljs-number, #b5cea8) !important; /* Light green for numbers */ + } + + .hljs-string { + color: var(--hljs-string, #ce9178) !important; /* Orange/peach for strings */ + } + + .hljs-title, + .hljs-built_in, + .hljs-builtin-name { + color: var(--hljs-title, #dcdcaa) !important; /* Yellow for function names */ + } + + .hljs-keyword, + .hljs-selector-tag { + color: var(--hljs-keyword, #569cd6) !important; /* Blue for keywords */ + } + + /* Additional elements that should match the base text color */ + .hljs-operator, + .hljs-punctuation, + .hljs-template-variable, + .hljs-attribute, + .hljs-tag, + .hljs-regexp, + .hljs-link, + .hljs-selector-id, + .hljs-selector-class, + .hljs-meta, + .hljs-type, + .hljs-params, + .hljs-symbol, + .hljs-bullet, + .hljs-section { + color: var(--code-color, #d4d4d4) !important; /* Default text color */ + } + + .hljs-emphasis { + font-style: italic; + } + + .hljs-strong { + font-weight: 700; + } + 
} } .spin-animation { diff --git a/packages/ui/src/hooks/useApi.jsx b/packages/ui/src/hooks/useApi.jsx index 932f0a6e0..58f8bf09c 100644 --- a/packages/ui/src/hooks/useApi.jsx +++ b/packages/ui/src/hooks/useApi.jsx @@ -1,25 +1,30 @@ import { useState } from 'react' +import { useError } from '@/store/context/ErrorContext' export default (apiFunc) => { const [data, setData] = useState(null) - const [error, setError] = useState(null) const [loading, setLoading] = useState(false) + const [error, setApiError] = useState(null) + const { setError, handleError } = useError() const request = async (...args) => { setLoading(true) try { const result = await apiFunc(...args) setData(result.data) + setError(null) + setApiError(null) } catch (err) { - setError(err || 'Unexpected Error!') + handleError(err || 'Unexpected Error!') + setApiError(err || 'Unexpected Error!') } finally { setLoading(false) } } return { - data, error, + data, loading, request } diff --git a/packages/ui/src/hooks/useAuth.jsx b/packages/ui/src/hooks/useAuth.jsx new file mode 100644 index 000000000..8a8f2635b --- /dev/null +++ b/packages/ui/src/hooks/useAuth.jsx @@ -0,0 +1,54 @@ +import { useSelector } from 'react-redux' +import { useConfig } from '@/store/context/ConfigContext' + +export const useAuth = () => { + const { isOpenSource } = useConfig() + const permissions = useSelector((state) => state.auth.permissions) + const features = useSelector((state) => state.auth.features) + const isGlobal = useSelector((state) => state.auth.isGlobal) + const currentUser = useSelector((state) => state.auth.user) + + const hasPermission = (permissionId) => { + if (isOpenSource || isGlobal) { + return true + } + if (!permissionId) return false + const permissionIds = permissionId.split(',') + if (permissions && permissions.length) { + return permissionIds.some((permissionId) => permissions.includes(permissionId)) + } + return false + } + + const hasAssignedWorkspace = (workspaceId) => { + if (isOpenSource || isGlobal) 
{ + return true + } + const activeWorkspaceId = currentUser?.activeWorkspaceId || '' + if (workspaceId === activeWorkspaceId) { + return true + } + return false + } + + const hasDisplay = (display) => { + if (!display) { + return true + } + + // if it has display flag, but user has no features, then it should not be displayed + if (!features || Array.isArray(features) || Object.keys(features).length === 0) { + return false + } + + // check if the display flag is in the features + if (Object.hasOwnProperty.call(features, display)) { + const flag = features[display] === 'true' || features[display] === true + return flag + } + + return false + } + + return { hasPermission, hasAssignedWorkspace, hasDisplay } +} diff --git a/packages/ui/src/index.jsx b/packages/ui/src/index.jsx index bf2d49b0e..d8833eed4 100644 --- a/packages/ui/src/index.jsx +++ b/packages/ui/src/index.jsx @@ -12,6 +12,8 @@ import { Provider } from 'react-redux' import { SnackbarProvider } from 'notistack' import ConfirmContextProvider from '@/store/context/ConfirmContextProvider' import { ReactFlowContext } from '@/store/context/ReactFlowContext' +import { ConfigProvider } from '@/store/context/ConfigContext' +import { ErrorProvider } from '@/store/context/ErrorContext' const container = document.getElementById('root') const root = createRoot(container) @@ -21,11 +23,15 @@ root.render( - - - - - + + + + + + + + + diff --git a/packages/ui/src/layout/AuthLayout/index.jsx b/packages/ui/src/layout/AuthLayout/index.jsx new file mode 100644 index 000000000..a93896e0e --- /dev/null +++ b/packages/ui/src/layout/AuthLayout/index.jsx @@ -0,0 +1,29 @@ +import { Outlet } from 'react-router-dom' +import { Box, useTheme } from '@mui/material' + +// ==============================|| MINIMAL LAYOUT ||============================== // + +const AuthLayout = () => { + const theme = useTheme() + + return ( + + + + ) +} + +export default AuthLayout diff --git 
a/packages/ui/src/layout/MainLayout/Header/OrgWorkspaceBreadcrumbs/index.jsx b/packages/ui/src/layout/MainLayout/Header/OrgWorkspaceBreadcrumbs/index.jsx new file mode 100644 index 000000000..2381dfee0 --- /dev/null +++ b/packages/ui/src/layout/MainLayout/Header/OrgWorkspaceBreadcrumbs/index.jsx @@ -0,0 +1,435 @@ +import { useState, useEffect } from 'react' +import { useNavigate } from 'react-router-dom' +import { useSelector } from 'react-redux' + +// material-ui +import { + Breadcrumbs, + Menu, + MenuItem, + Dialog, + DialogContent, + CircularProgress, + Typography, + Stack, + Chip, + ListItemText, + ListItemIcon, + Select +} from '@mui/material' +import { Check } from '@mui/icons-material' +import { alpha, styled, emphasize } from '@mui/material/styles' + +import { IconChevronDown } from '@tabler/icons-react' + +// api +import userApi from '@/api/user' +import workspaceApi from '@/api/workspace' + +// hooks +import useApi from '@/hooks/useApi' + +// store +import { store } from '@/store' +import { workspaceSwitchSuccess } from '@/store/reducers/authSlice' + +// ==============================|| OrgWorkspaceBreadcrumbs ||============================== // + +const StyledMenu = styled((props) => ( +
+))(({ theme }) => ({ + '& .MuiPaper-root': { + borderRadius: 6, + marginTop: theme.spacing(1), + minWidth: 180, + boxShadow: + 'rgb(255, 255, 255) 0px 0px 0px 0px, rgba(0, 0, 0, 0.05) 0px 0px 0px 1px, rgba(0, 0, 0, 0.1) 0px 10px 15px -3px, rgba(0, 0, 0, 0.05) 0px 4px 6px -2px', + '& .MuiMenu-list': { + padding: '4px 0' + }, + '& .MuiMenuItem-root': { + '& .MuiSvgIcon-root': { + fontSize: 18, + color: theme.palette.text.secondary, + marginRight: theme.spacing(1.5) + }, + '&:active': { + backgroundColor: alpha(theme.palette.primary.main, theme.palette.action.selectedOpacity) + } + } + } +})) + +const StyledBreadcrumb = styled(Chip)(({ theme, isDarkMode }) => { + const backgroundColor = isDarkMode ? theme.palette.grey[800] : theme.palette.grey[100] + return { + backgroundColor, + height: theme.spacing(3), + color: theme.palette.text.primary, + fontWeight: theme.typography.fontWeightRegular, + '&:hover, &:focus': { + backgroundColor: emphasize(backgroundColor, 0.06) + }, + '&:active': { + boxShadow: theme.shadows[1], + backgroundColor: emphasize(backgroundColor, 0.12) + } + } +}) + +const OrgWorkspaceBreadcrumbs = () => { + const navigate = useNavigate() + + const user = useSelector((state) => state.auth.user) + const isAuthenticated = useSelector((state) => state.auth.isAuthenticated) + const customization = useSelector((state) => state.customization) + + const [orgAnchorEl, setOrgAnchorEl] = useState(null) + const [workspaceAnchorEl, setWorkspaceAnchorEl] = useState(null) + const orgMenuOpen = Boolean(orgAnchorEl) + const workspaceMenuOpen = Boolean(workspaceAnchorEl) + + const [assignedOrganizations, setAssignedOrganizations] = useState([]) + const [activeOrganizationId, setActiveOrganizationId] = useState(undefined) + const [assignedWorkspaces, setAssignedWorkspaces] = useState([]) + const [activeWorkspaceId, setActiveWorkspaceId] = useState(undefined) + const [isWorkspaceSwitching, setIsWorkspaceSwitching] = useState(false) + const [isOrganizationSwitching, 
setIsOrganizationSwitching] = useState(false) + const [showWorkspaceUnavailableDialog, setShowWorkspaceUnavailableDialog] = useState(false) + + const getOrganizationsByUserIdApi = useApi(userApi.getOrganizationsByUserId) + const getWorkspacesByUserIdApi = useApi(userApi.getWorkspacesByUserId) + const switchWorkspaceApi = useApi(workspaceApi.switchWorkspace) + + const handleOrgClick = (event) => { + setOrgAnchorEl(event.currentTarget) + } + + const handleWorkspaceClick = (event) => { + setWorkspaceAnchorEl(event.currentTarget) + } + + const handleOrgClose = () => { + setOrgAnchorEl(null) + } + + const handleWorkspaceClose = () => { + setWorkspaceAnchorEl(null) + } + + const handleOrgSwitch = async (orgId) => { + setOrgAnchorEl(null) + if (activeOrganizationId !== orgId) { + setIsOrganizationSwitching(true) + setActiveOrganizationId(orgId) + // Fetch workspaces for the new organization + getWorkspacesByUserIdApi.request(user.id) + } + } + + const handleUnavailableOrgSwitch = async (orgId) => { + setOrgAnchorEl(null) + setActiveOrganizationId(orgId) + // Fetch workspaces for the new organization + try { + const response = await userApi.getWorkspacesByUserId(user.id) + const workspaces = response.data + const filteredAssignedWorkspaces = workspaces.filter((item) => item.workspace.organizationId === orgId) + const formattedAssignedWorkspaces = filteredAssignedWorkspaces.map((item) => ({ + id: item.workspaceId, + name: item.workspace.name + })) + + const sortedWorkspaces = [...formattedAssignedWorkspaces].sort((a, b) => a.name.localeCompare(b.name)) + + setAssignedWorkspaces(sortedWorkspaces) + } catch (error) { + console.error('Error fetching workspaces:', error) + } + } + + const switchWorkspace = async (id) => { + setWorkspaceAnchorEl(null) + if (activeWorkspaceId !== id) { + setIsWorkspaceSwitching(true) + switchWorkspaceApi.request(id) + } + } + + useEffect(() => { + // Fetch workspaces when component mounts + if (isAuthenticated && user) { + 
getOrganizationsByUserIdApi.request(user.id) + } + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [isAuthenticated, user]) + + useEffect(() => { + if (getWorkspacesByUserIdApi.data) { + const filteredAssignedWorkspaces = getWorkspacesByUserIdApi.data.filter( + (item) => item.workspace.organizationId === activeOrganizationId + ) + const formattedAssignedWorkspaces = filteredAssignedWorkspaces.map((item) => ({ + id: item.workspaceId, + name: item.workspace.name + })) + + const sortedWorkspaces = [...formattedAssignedWorkspaces].sort((a, b) => a.name.localeCompare(b.name)) + + // Only check workspace availability if we're not in the process of switching organizations + if (!isOrganizationSwitching) { + setTimeout(() => { + if (user && user.activeWorkspaceId && !sortedWorkspaces.find((item) => item.id === user.activeWorkspaceId)) { + setShowWorkspaceUnavailableDialog(true) + } + }, 500) + } + + setAssignedWorkspaces(sortedWorkspaces) + + if (isOrganizationSwitching && sortedWorkspaces.length > 0) { + // After organization switch, switch to the first workspace in the list + switchWorkspaceApi.request(sortedWorkspaces[0].id) + } else { + setIsOrganizationSwitching(false) + } + } + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getWorkspacesByUserIdApi.data]) + + useEffect(() => { + if (getWorkspacesByUserIdApi.error) { + setIsWorkspaceSwitching(false) + } + }, [getWorkspacesByUserIdApi.error]) + + useEffect(() => { + if (getOrganizationsByUserIdApi.data) { + const formattedAssignedOrgs = getOrganizationsByUserIdApi.data.map((organization) => ({ + id: organization.organizationId, + name: `${organization.user.name || organization.user.email}'s Organization` + })) + + const sortedOrgs = [...formattedAssignedOrgs].sort((a, b) => a.name.localeCompare(b.name)) + // Only check workspace availability after a short delay to allow store updates to complete + setTimeout(() => { + if (user && user.activeOrganizationId && !sortedOrgs.find((item) => 
item.id === user.activeOrganizationId)) { + setActiveOrganizationId(undefined) + setShowWorkspaceUnavailableDialog(true) + } + }, 500) + + setAssignedOrganizations(sortedOrgs) + + getWorkspacesByUserIdApi.request(user.id) + } + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getOrganizationsByUserIdApi.data]) + + useEffect(() => { + if (getOrganizationsByUserIdApi.error) { + setIsOrganizationSwitching(false) + } + }, [getOrganizationsByUserIdApi.error]) + + useEffect(() => { + if (switchWorkspaceApi.data) { + setIsWorkspaceSwitching(false) + setIsOrganizationSwitching(false) + store.dispatch(workspaceSwitchSuccess(switchWorkspaceApi.data)) + + // get the current path and navigate to the same after refresh + navigate('/', { replace: true }) + navigate(0) + } + }, [switchWorkspaceApi.data, navigate]) + + useEffect(() => { + if (switchWorkspaceApi.error) { + setIsWorkspaceSwitching(false) + setIsOrganizationSwitching(false) + } + }, [switchWorkspaceApi.error]) + + useEffect(() => { + setActiveOrganizationId(user.activeOrganizationId) + setActiveWorkspaceId(user.activeWorkspaceId) + }, [user]) + + return ( + <> + {isAuthenticated && user ? ( + <> + + {assignedOrganizations.map((org) => ( + handleOrgSwitch(org.id)} selected={org.id === activeOrganizationId}> + {org.name} + {org.id === activeOrganizationId && ( + + + + )} + + ))} + + + {assignedWorkspaces.map((workspace) => ( + switchWorkspace(workspace.id)} + selected={workspace.id === activeWorkspaceId} + > + {workspace.name} + {workspace.id === activeWorkspaceId && ( + + + + )} + + ))} + + + org.id === activeOrganizationId)?.name || 'Organization'} + deleteIcon={} + onDelete={handleOrgClick} + onClick={handleOrgClick} + /> + ws.id === activeWorkspaceId)?.name || 'Workspace'} + deleteIcon={} + onDelete={handleWorkspaceClick} + onClick={handleWorkspaceClick} + /> + + + ) : null} + + + + + + Switching organization... + + + + + + + + + + Switching workspace... 
+ + + + + + + + Workspace Unavailable + {assignedWorkspaces.length > 0 && !activeOrganizationId ? ( + <> + + Your current workspace is no longer available. Please select another workspace to continue. + + + + ) : ( + <> + + Workspace is no longer available. Please select a different organization/workspace to continue. + + + {activeOrganizationId && assignedWorkspaces.length > 0 && ( + + )} + + )} + + + + + ) +} + +OrgWorkspaceBreadcrumbs.propTypes = {} + +export default OrgWorkspaceBreadcrumbs diff --git a/packages/ui/src/layout/MainLayout/Header/ProfileSection/index.jsx b/packages/ui/src/layout/MainLayout/Header/ProfileSection/index.jsx index 5e008be92..7b75dc071 100644 --- a/packages/ui/src/layout/MainLayout/Header/ProfileSection/index.jsx +++ b/packages/ui/src/layout/MainLayout/Header/ProfileSection/index.jsx @@ -1,10 +1,12 @@ -import { closeSnackbar as closeSnackbarAction, enqueueSnackbar as enqueueSnackbarAction, REMOVE_DIRTY } from '@/store/actions' -import { exportData, stringify } from '@/utils/exportImport' -import useNotifier from '@/utils/useNotifier' import PropTypes from 'prop-types' import { useEffect, useRef, useState } from 'react' import { createPortal } from 'react-dom' import { useDispatch, useSelector } from 'react-redux' +import { useNavigate } from 'react-router-dom' + +import { closeSnackbar as closeSnackbarAction, enqueueSnackbar as enqueueSnackbarAction, REMOVE_DIRTY } from '@/store/actions' +import { exportData, stringify } from '@/utils/exportImport' +import useNotifier from '@/utils/useNotifier' // material-ui import { @@ -35,22 +37,22 @@ import { useTheme } from '@mui/material/styles' import PerfectScrollbar from 'react-perfect-scrollbar' // project imports +import { PermissionListItemButton } from '@/ui-component/button/RBACButtons' import MainCard from '@/ui-component/cards/MainCard' import AboutDialog from '@/ui-component/dialog/AboutDialog' import Transitions from '@/ui-component/extended/Transitions' // assets import ExportingGIF 
from '@/assets/images/Exporting.gif' -import { IconFileExport, IconFileUpload, IconInfoCircle, IconLogout, IconSettings, IconX } from '@tabler/icons-react' +import { IconFileExport, IconFileUpload, IconInfoCircle, IconLogout, IconSettings, IconUserEdit, IconX } from '@tabler/icons-react' import './index.css' -//API +// API import exportImportApi from '@/api/exportimport' // Hooks import useApi from '@/hooks/useApi' import { getErrorMessage } from '@/utils/errorHandler' -import { useNavigate } from 'react-router-dom' const dataToExport = [ 'Agentflows', @@ -98,7 +100,14 @@ const ExportDialog = ({ show, onCancel, onExport }) => { {!isExporting && ( - + {dataToExport.map((data, index) => ( { + const portalElement = document.getElementById('portal') + + const component = show ? ( + + + Importing... + + + +
+ ImportingGIF + Importing data might takes a while +
+
+
+
+ ) : null + + return createPortal(component, portalElement) +} + +ImportDialog.propTypes = { + show: PropTypes.bool +} + // ==============================|| PROFILE MENU ||============================== // -const ProfileSection = ({ username, handleLogout }) => { +const ProfileSection = ({ handleLogout }) => { const theme = useTheme() const customization = useSelector((state) => state.customization) const [open, setOpen] = useState(false) const [aboutDialogOpen, setAboutDialogOpen] = useState(false) + const [exportDialogOpen, setExportDialogOpen] = useState(false) + const [importDialogOpen, setImportDialogOpen] = useState(false) const anchorRef = useRef(null) const inputRef = useRef() const navigate = useNavigate() + const currentUser = useSelector((state) => state.auth.user) + const isAuthenticated = useSelector((state) => state.auth.isAuthenticated) const importAllApi = useApi(exportImportApi.importData) const exportAllApi = useApi(exportImportApi.exportData) @@ -223,6 +270,7 @@ const ProfileSection = ({ username, handleLogout }) => { if (!e.target.files) return const file = e.target.files[0] + setImportDialogOpen(true) const reader = new FileReader() reader.onload = (evt) => { @@ -236,6 +284,7 @@ const ProfileSection = ({ username, handleLogout }) => { } const importAllSuccess = () => { + setImportDialogOpen(false) dispatch({ type: REMOVE_DIRTY }) enqueueSnackbar({ message: `Import All successful`, @@ -284,6 +333,7 @@ const ProfileSection = ({ username, handleLogout }) => { useEffect(() => { if (importAllApi.error) { + setImportDialogOpen(false) let errMsg = 'Invalid Imported File' let error = importAllApi.error if (error?.response?.data) { @@ -331,7 +381,6 @@ const ProfileSection = ({ username, handleLogout }) => { if (prevOpen.current === true && open === false) { anchorRef.current.focus() } - prevOpen.current = open }, [open]) @@ -380,10 +429,16 @@ const ProfileSection = ({ username, handleLogout }) => { - {username && ( + {isAuthenticated && currentUser ? 
( - {username} + {currentUser.name} + + + ) : ( + + + User )} @@ -406,7 +461,8 @@ const ProfileSection = ({ username, handleLogout }) => { } }} > - { setExportDialogOpen(true) @@ -416,8 +472,9 @@ const ProfileSection = ({ username, handleLogout }) => { Export} /> - - + { importAll() @@ -427,7 +484,7 @@ const ProfileSection = ({ username, handleLogout }) => { Import} /> - + { - About Flowise} /> + Version} /> - {localStorage.getItem('username') && localStorage.getItem('password') && ( + {isAuthenticated && !currentUser.isSSO && ( { + setOpen(false) + navigate('/account') + }} > - + - Logout} /> + Account Settings} /> )} + + + + + Logout} /> + @@ -463,12 +532,12 @@ const ProfileSection = ({ username, handleLogout }) => { setAboutDialogOpen(false)} /> setExportDialogOpen(false)} onExport={(data) => onExport(data)} /> + ) } ProfileSection.propTypes = { - username: PropTypes.string, handleLogout: PropTypes.func } diff --git a/packages/ui/src/layout/MainLayout/Header/WorkspaceSwitcher/index.jsx b/packages/ui/src/layout/MainLayout/Header/WorkspaceSwitcher/index.jsx new file mode 100644 index 000000000..0a058d74e --- /dev/null +++ b/packages/ui/src/layout/MainLayout/Header/WorkspaceSwitcher/index.jsx @@ -0,0 +1,386 @@ +import { useEffect, useRef, useState } from 'react' +import { useSelector } from 'react-redux' +import { useNavigate } from 'react-router-dom' + +// material-ui +import { Check } from '@mui/icons-material' +import KeyboardArrowDownIcon from '@mui/icons-material/KeyboardArrowDown' +import { + Dialog, + DialogContent, + CircularProgress, + Button, + Select, + Typography, + Stack, + ListItemIcon, + ListItemText, + Menu, + MenuItem, + DialogActions +} from '@mui/material' +import { alpha, styled } from '@mui/material/styles' + +// api +import userApi from '@/api/user' +import workspaceApi from '@/api/workspace' +import accountApi from '@/api/account.api' + +// hooks +import useApi from '@/hooks/useApi' +import { useConfig } from '@/store/context/ConfigContext' + 
+// store +import { store } from '@/store' +import { logoutSuccess, workspaceSwitchSuccess } from '@/store/reducers/authSlice' + +// ==============================|| WORKSPACE SWITCHER ||============================== // + +const StyledMenu = styled((props) => ( +
+))(({ theme }) => ({ + '& .MuiPaper-root': { + borderRadius: 6, + marginTop: theme.spacing(1), + minWidth: 180, + boxShadow: + 'rgb(255, 255, 255) 0px 0px 0px 0px, rgba(0, 0, 0, 0.05) 0px 0px 0px 1px, rgba(0, 0, 0, 0.1) 0px 10px 15px -3px, rgba(0, 0, 0, 0.05) 0px 4px 6px -2px', + '& .MuiMenu-list': { + padding: '4px 0' + }, + '& .MuiMenuItem-root': { + '& .MuiSvgIcon-root': { + fontSize: 18, + color: theme.palette.text.secondary, + marginRight: theme.spacing(1.5) + }, + '&:active': { + backgroundColor: alpha(theme.palette.primary.main, theme.palette.action.selectedOpacity) + } + } + } +})) + +const WorkspaceSwitcher = () => { + const navigate = useNavigate() + + const user = useSelector((state) => state.auth.user) + const isAuthenticated = useSelector((state) => state.auth.isAuthenticated) + const features = useSelector((state) => state.auth.features) + + const { isEnterpriseLicensed } = useConfig() + + const [anchorEl, setAnchorEl] = useState(null) + const open = Boolean(anchorEl) + const prevOpen = useRef(open) + + const [assignedWorkspaces, setAssignedWorkspaces] = useState([]) + const [activeWorkspace, setActiveWorkspace] = useState(undefined) + const [isSwitching, setIsSwitching] = useState(false) + const [showWorkspaceUnavailableDialog, setShowWorkspaceUnavailableDialog] = useState(false) + const [showErrorDialog, setShowErrorDialog] = useState(false) + const [errorMessage, setErrorMessage] = useState('') + + const getWorkspacesByOrganizationIdUserIdApi = useApi(userApi.getWorkspacesByOrganizationIdUserId) + const getWorkspacesByUserIdApi = useApi(userApi.getWorkspacesByUserId) + const switchWorkspaceApi = useApi(workspaceApi.switchWorkspace) + const logoutApi = useApi(accountApi.logout) + + const handleClick = (event) => { + setAnchorEl(event.currentTarget) + } + + const handleClose = () => { + setAnchorEl(null) + } + + const switchWorkspace = async (id) => { + setAnchorEl(null) + if (activeWorkspace !== id) { + setIsSwitching(true) + 
switchWorkspaceApi.request(id) + } + } + + const handleLogout = () => { + logoutApi.request() + } + + useEffect(() => { + // Fetch workspaces when component mounts + if (isAuthenticated && user) { + const WORKSPACE_FLAG = 'feat:workspaces' + if (Object.hasOwnProperty.call(features, WORKSPACE_FLAG)) { + const flag = features[WORKSPACE_FLAG] === 'true' || features[WORKSPACE_FLAG] === true + if (flag) { + if (isEnterpriseLicensed) { + getWorkspacesByOrganizationIdUserIdApi.request(user.activeOrganizationId, user.id) + } else { + getWorkspacesByUserIdApi.request(user.id) + } + } + } + } + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [isAuthenticated, user, features, isEnterpriseLicensed]) + + useEffect(() => { + if (getWorkspacesByOrganizationIdUserIdApi.data) { + const formattedAssignedWorkspaces = getWorkspacesByOrganizationIdUserIdApi.data.map((item) => ({ + id: item.workspaceId, + name: item.workspace.name + })) + + const sortedWorkspaces = [...formattedAssignedWorkspaces].sort((a, b) => a.name.localeCompare(b.name)) + + // Only check workspace availability after a short delay to allow store updates to complete + setTimeout(() => { + if (user && user.activeWorkspaceId && !sortedWorkspaces.find((item) => item.id === user.activeWorkspaceId)) { + setShowWorkspaceUnavailableDialog(true) + } + }, 500) + + setAssignedWorkspaces(sortWorkspaces(sortedWorkspaces)) + } + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getWorkspacesByOrganizationIdUserIdApi.data, user.activeWorkspaceId]) + + useEffect(() => { + if (getWorkspacesByUserIdApi.data) { + const formattedAssignedWorkspaces = getWorkspacesByUserIdApi.data.map((item) => ({ + id: item.workspaceId, + name: item.workspace.name + })) + + const sortedWorkspaces = [...formattedAssignedWorkspaces].sort((a, b) => a.name.localeCompare(b.name)) + + // Only check workspace availability after a short delay to allow store updates to complete + setTimeout(() => { + if (user && 
user.activeWorkspaceId && !sortedWorkspaces.find((item) => item.id === user.activeWorkspaceId)) { + setShowWorkspaceUnavailableDialog(true) + } + }, 500) + + setAssignedWorkspaces(sortWorkspaces(sortedWorkspaces)) + } + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getWorkspacesByUserIdApi.data, user.activeWorkspaceId]) + + useEffect(() => { + if (switchWorkspaceApi.data) { + setIsSwitching(false) + store.dispatch(workspaceSwitchSuccess(switchWorkspaceApi.data)) + + // get the current path and navigate to the same after refresh + navigate('/', { replace: true }) + navigate(0) + } + }, [switchWorkspaceApi.data, navigate]) + + useEffect(() => { + if (switchWorkspaceApi.error) { + setIsSwitching(false) + setShowWorkspaceUnavailableDialog(false) + + // Set error message and show error dialog + setErrorMessage(switchWorkspaceApi.error.message || 'Failed to switch workspace') + setShowErrorDialog(true) + } + }, [switchWorkspaceApi.error]) + + useEffect(() => { + try { + if (logoutApi.data && logoutApi.data.message === 'logged_out') { + store.dispatch(logoutSuccess()) + window.location.href = logoutApi.data.redirectTo + } + } catch (e) { + console.error(e) + } + }, [logoutApi.data]) + + useEffect(() => { + setActiveWorkspace(user.activeWorkspace) + + prevOpen.current = open + }, [open, user]) + + const sortWorkspaces = (assignedWorkspaces) => { + // Sort workspaces alphabetically by name, with special characters last + const sortedWorkspaces = assignedWorkspaces + ? 
[...assignedWorkspaces].sort((a, b) => { + const isSpecialA = /^[^a-zA-Z0-9]/.test(a.name) + const isSpecialB = /^[^a-zA-Z0-9]/.test(b.name) + + // If one has special char and other doesn't, special char goes last + if (isSpecialA && !isSpecialB) return 1 + if (!isSpecialA && isSpecialB) return -1 + + // If both are special or both are not special, sort alphabetically + return a.name.localeCompare(b.name, undefined, { + numeric: true, + sensitivity: 'base' + }) + }) + : [] + return sortedWorkspaces + } + + return ( + <> + {isAuthenticated && + user && + assignedWorkspaces?.length > 1 && + !(assignedWorkspaces.length === 1 && user.activeWorkspace === 'Default Workspace') ? ( + <> + + + {assignedWorkspaces.map((item, index) => ( + { + switchWorkspace(item.id) + }} + key={index} + disableRipple + > + {item.id === user.activeWorkspaceId ? ( + <> + + + + {item.name} + + ) : ( + {item.name} + )} + + ))} + + + ) : null} + + + + + + Switching workspace... + + + + + + + + + Workspace Unavailable + + Your current workspace is no longer available. Please select another workspace to continue. + + + + + {assignedWorkspaces.length === 0 && ( + + + + )} + + + {/* Error Dialog */} + + + + Workspace Switch Error + {errorMessage} + {isEnterpriseLicensed && ( + + Please contact your administrator for assistance. 
+ + )} + + + + + + + + ) +} + +WorkspaceSwitcher.propTypes = {} + +export default WorkspaceSwitcher diff --git a/packages/ui/src/layout/MainLayout/Header/index.jsx b/packages/ui/src/layout/MainLayout/Header/index.jsx index 53d6402bd..ae95c1eef 100644 --- a/packages/ui/src/layout/MainLayout/Header/index.jsx +++ b/packages/ui/src/layout/MainLayout/Header/index.jsx @@ -1,22 +1,35 @@ import PropTypes from 'prop-types' import { useSelector, useDispatch } from 'react-redux' -import { useState } from 'react' +import { useEffect, useState } from 'react' import { useNavigate } from 'react-router-dom' // material-ui -import { useTheme } from '@mui/material/styles' -import { Avatar, Box, ButtonBase, Switch } from '@mui/material' -import { styled } from '@mui/material/styles' +import { Button, Avatar, Box, ButtonBase, Switch, Typography, Link } from '@mui/material' +import { useTheme, styled, darken } from '@mui/material/styles' // project imports import LogoSection from '../LogoSection' import ProfileSection from './ProfileSection' +import WorkspaceSwitcher from '@/layout/MainLayout/Header/WorkspaceSwitcher' +import OrgWorkspaceBreadcrumbs from '@/layout/MainLayout/Header/OrgWorkspaceBreadcrumbs' +import PricingDialog from '@/ui-component/subscription/PricingDialog' // assets -import { IconMenu2 } from '@tabler/icons-react' +import { IconMenu2, IconX, IconSparkles } from '@tabler/icons-react' // store +import { store } from '@/store' import { SET_DARKMODE } from '@/store/actions' +import { useConfig } from '@/store/context/ConfigContext' +import { enqueueSnackbar as enqueueSnackbarAction, closeSnackbar as closeSnackbarAction } from '@/store/actions' +import { logoutSuccess } from '@/store/reducers/authSlice' + +// API +import accountApi from '@/api/account.api' + +// Hooks +import useApi from '@/hooks/useApi' +import useNotifier from '@/utils/useNotifier' // ==============================|| MAIN NAVBAR / HEADER ||============================== // @@ -67,14 +80,87 @@ const 
MaterialUISwitch = styled(Switch)(({ theme }) => ({ } })) +const GitHubStarButton = ({ starCount, isDark }) => { + const theme = useTheme() + + const formattedStarCount = starCount.toLocaleString() + + return ( + + + + + + + + Star + + + + + {formattedStarCount} + + + + + ) +} + +GitHubStarButton.propTypes = { + starCount: PropTypes.number.isRequired, + isDark: PropTypes.bool.isRequired +} + const Header = ({ handleLeftDrawerToggle }) => { const theme = useTheme() const navigate = useNavigate() const customization = useSelector((state) => state.customization) + const logoutApi = useApi(accountApi.logout) const [isDark, setIsDark] = useState(customization.isDarkMode) const dispatch = useDispatch() + const { isEnterpriseLicensed, isCloud, isOpenSource } = useConfig() + const currentUser = useSelector((state) => state.auth.user) + const isAuthenticated = useSelector((state) => state.auth.isAuthenticated) + const [isPricingOpen, setIsPricingOpen] = useState(false) + const [starCount, setStarCount] = useState(0) + + useNotifier() + + const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) + const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) const changeDarkMode = () => { dispatch({ type: SET_DARKMODE, isDarkMode: !isDark }) @@ -83,15 +169,52 @@ const Header = ({ handleLeftDrawerToggle }) => { } const signOutClicked = () => { - localStorage.removeItem('username') - localStorage.removeItem('password') - navigate('/', { replace: true }) - navigate(0) + logoutApi.request() + enqueueSnackbar({ + message: 'Logging out...', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) } + useEffect(() => { + try { + if (logoutApi.data && logoutApi.data.message === 'logged_out') { + store.dispatch(logoutSuccess()) + window.location.href = logoutApi.data.redirectTo + } + } catch (e) { + console.error(e) + } + }, [logoutApi.data]) + + useEffect(() => { + if (isCloud || isOpenSource) 
{ + const fetchStarCount = async () => { + try { + const response = await fetch('https://api.github.com/repos/FlowiseAI/Flowise') + const data = await response.json() + if (data.stargazers_count) { + setStarCount(data.stargazers_count) + } + } catch (error) { + setStarCount(0) + } + } + + fetchStarCount() + } + }, [isCloud, isOpenSource]) + return ( <> - {/* logo & toggler button */} { - - - - - + {isAuthenticated && ( + + + + + + )} - + {isCloud || isOpenSource ? ( + + + + ) : ( + + )} + {isEnterpriseLicensed && isAuthenticated && } + {isCloud && isAuthenticated && } + {isCloud && currentUser?.isOrganizationAdmin && ( + + )} + {isPricingOpen && isCloud && ( + { + setIsPricingOpen(false) + if (planUpdated) { + navigate('/') + navigate(0) + } + }} + /> + )} - + ) } diff --git a/packages/ui/src/layout/MainLayout/Sidebar/CloudMenuList.jsx b/packages/ui/src/layout/MainLayout/Sidebar/CloudMenuList.jsx new file mode 100644 index 000000000..61032a1c7 --- /dev/null +++ b/packages/ui/src/layout/MainLayout/Sidebar/CloudMenuList.jsx @@ -0,0 +1,111 @@ +import { useEffect } from 'react' +import { useDispatch, useSelector } from 'react-redux' +import { enqueueSnackbar as enqueueSnackbarAction, closeSnackbar as closeSnackbarAction } from '@/store/actions' +import { store } from '@/store' + +// material-ui +import { Divider, Box, Button, List, ListItemButton, ListItemIcon, Typography } from '@mui/material' +import { useTheme } from '@mui/material/styles' + +// project imports +import useNotifier from '@/utils/useNotifier' +import { useConfig } from '@/store/context/ConfigContext' + +// API +import { logoutSuccess } from '@/store/reducers/authSlice' + +// Hooks +import useApi from '@/hooks/useApi' + +// icons +import { IconFileText, IconLogout, IconX } from '@tabler/icons-react' +import accountApi from '@/api/account.api' + +const CloudMenuList = () => { + const customization = useSelector((state) => state.customization) + const dispatch = useDispatch() + useNotifier() + const 
theme = useTheme() + + const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) + const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) + + const logoutApi = useApi(accountApi.logout) + const { isCloud } = useConfig() + + const signOutClicked = () => { + logoutApi.request() + enqueueSnackbar({ + message: 'Logging out...', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + } + + useEffect(() => { + try { + if (logoutApi.data && logoutApi.data.message === 'logged_out') { + store.dispatch(logoutSuccess()) + window.location.href = logoutApi.data.redirectTo + } + } catch (e) { + console.error(e) + } + }, [logoutApi.data]) + + return ( + <> + {isCloud && ( + + + + + + + + + + Documentation + + + + + + + + + Logout + + + + + )} + + ) +} + +export default CloudMenuList diff --git a/packages/ui/src/layout/MainLayout/Sidebar/MenuList/NavGroup/index.jsx b/packages/ui/src/layout/MainLayout/Sidebar/MenuList/NavGroup/index.jsx index 0625d54e7..054f409c9 100644 --- a/packages/ui/src/layout/MainLayout/Sidebar/MenuList/NavGroup/index.jsx +++ b/packages/ui/src/layout/MainLayout/Sidebar/MenuList/NavGroup/index.jsx @@ -7,19 +7,25 @@ import { Divider, List, Typography } from '@mui/material' // project imports import NavItem from '../NavItem' import NavCollapse from '../NavCollapse' +import { useAuth } from '@/hooks/useAuth' +import { Available } from '@/ui-component/rbac/available' // ==============================|| SIDEBAR MENU LIST GROUP ||============================== // const NavGroup = ({ item }) => { const theme = useTheme() + const { hasPermission, hasDisplay } = useAuth() - // menu list collapse & items - const items = item.children?.map((menu) => { + const listItems = (menu, level = 1) => { + // Filter based on display and permission + if (!shouldDisplayMenu(menu)) return null + + // Handle item and group types switch (menu.type) { case 'collapse': - return + return 
case 'item': - return + return default: return ( @@ -27,7 +33,40 @@ const NavGroup = ({ item }) => { ) } - }) + } + + const shouldDisplayMenu = (menu) => { + // Handle permission check + if (menu.permission && !hasPermission(menu.permission)) { + return false // Do not render if permission is lacking + } + + // If `display` is defined, check against cloud/enterprise conditions + if (menu.display) { + const shouldsiplay = hasDisplay(menu.display) + return shouldsiplay + } + + // If `display` is not defined, display by default + return true + } + + const renderPrimaryItems = () => { + const primaryGroup = item.children.find((child) => child.id === 'primary') + return primaryGroup.children + } + + const renderNonPrimaryGroups = () => { + let nonprimaryGroups = item.children.filter((child) => child.id !== 'primary') + // Display children based on permission and display + nonprimaryGroups = nonprimaryGroups.map((group) => { + const children = group.children.filter((menu) => shouldDisplayMenu(menu)) + return { ...group, children } + }) + // Get rid of group with empty children + nonprimaryGroups = nonprimaryGroups.filter((group) => group.children.length > 0) + return nonprimaryGroups + } return ( <> @@ -44,13 +83,31 @@ const NavGroup = ({ item }) => { ) } - sx={{ py: '20px' }} + sx={{ p: '16px', py: 2, display: 'flex', flexDirection: 'column', gap: 1 }} > - {items} + {renderPrimaryItems().map((menu) => listItems(menu))} - {/* group divider */} - + {renderNonPrimaryGroups().map((group) => { + const groupPermissions = group.children.map((menu) => menu.permission).join(',') + return ( + + <> + + + {group.title} + + } + sx={{ p: '16px', py: 2, display: 'flex', flexDirection: 'column', gap: 1 }} + > + {group.children.map((menu) => listItems(menu))} + + + + ) + })} ) } diff --git a/packages/ui/src/layout/MainLayout/Sidebar/MenuList/NavItem/index.jsx b/packages/ui/src/layout/MainLayout/Sidebar/MenuList/NavItem/index.jsx index 91a4fcf73..10445554b 100644 --- 
a/packages/ui/src/layout/MainLayout/Sidebar/MenuList/NavItem/index.jsx +++ b/packages/ui/src/layout/MainLayout/Sidebar/MenuList/NavItem/index.jsx @@ -101,7 +101,6 @@ const NavItem = ({ item, level, navType, onClick, onUploadFile }) => { disabled={item.disabled} sx={{ borderRadius: `${customization.borderRadius}px`, - mb: 0.5, alignItems: 'flex-start', backgroundColor: level > 1 ? 'transparent !important' : 'inherit', py: level > 1 ? 1 : 1.25, diff --git a/packages/ui/src/layout/MainLayout/Sidebar/MenuList/index.jsx b/packages/ui/src/layout/MainLayout/Sidebar/MenuList/index.jsx index 1473d8a81..6dfb1a3c9 100644 --- a/packages/ui/src/layout/MainLayout/Sidebar/MenuList/index.jsx +++ b/packages/ui/src/layout/MainLayout/Sidebar/MenuList/index.jsx @@ -1,14 +1,14 @@ // material-ui -import { Typography } from '@mui/material' +import { Box, Typography } from '@mui/material' // project imports import NavGroup from './NavGroup' -import menuItem from '@/menu-items' +import { menuItems } from '@/menu-items' // ==============================|| SIDEBAR MENU LIST ||============================== // const MenuList = () => { - const navItems = menuItem.items.map((item) => { + const navItems = menuItems.items.map((item) => { switch (item.type) { case 'group': return @@ -21,7 +21,7 @@ const MenuList = () => { } }) - return <>{navItems} + return {navItems} } export default MenuList diff --git a/packages/ui/src/layout/MainLayout/Sidebar/TrialInfo.jsx b/packages/ui/src/layout/MainLayout/Sidebar/TrialInfo.jsx new file mode 100644 index 000000000..0e63ce786 --- /dev/null +++ b/packages/ui/src/layout/MainLayout/Sidebar/TrialInfo.jsx @@ -0,0 +1,58 @@ +import { Box, Skeleton, Typography } from '@mui/material' +import { useTheme } from '@mui/material/styles' +import PropTypes from 'prop-types' +import { StyledButton } from '@/ui-component/button/StyledButton' + +const TrialInfo = ({ billingPortalUrl, isLoading, paymentMethodExists, trialDaysLeft }) => { + const theme = useTheme() + + return ( 
+ + {isLoading ? ( + + + + + ) : ( + <> + + There are{' '} + + {trialDaysLeft} days left + {' '} + in your trial. {!paymentMethodExists ? 'Update your payment method to avoid service interruption.' : ''} + + {!paymentMethodExists && ( + + + Update Payment Method + + + )} + + )} + + ) +} + +TrialInfo.propTypes = { + billingPortalUrl: PropTypes.string, + isLoading: PropTypes.bool, + paymentMethodExists: PropTypes.bool, + trialDaysLeft: PropTypes.number +} + +export default TrialInfo diff --git a/packages/ui/src/layout/MainLayout/Sidebar/index.jsx b/packages/ui/src/layout/MainLayout/Sidebar/index.jsx index 5d1908c8b..b05d6669e 100644 --- a/packages/ui/src/layout/MainLayout/Sidebar/index.jsx +++ b/packages/ui/src/layout/MainLayout/Sidebar/index.jsx @@ -1,4 +1,5 @@ import PropTypes from 'prop-types' +import { useSelector } from 'react-redux' // material-ui import { useTheme } from '@mui/material/styles' @@ -11,6 +12,9 @@ import { BrowserView, MobileView } from 'react-device-detect' // project imports import MenuList from './MenuList' import LogoSection from '../LogoSection' +import CloudMenuList from '@/layout/MainLayout/Sidebar/CloudMenuList' + +// store import { drawerWidth, headerHeight } from '@/store/constant' // ==============================|| SIDEBAR DRAWER ||============================== // @@ -18,6 +22,7 @@ import { drawerWidth, headerHeight } from '@/store/constant' const Sidebar = ({ drawerOpen, drawerToggle, window }) => { const theme = useTheme() const matchUpMd = useMediaQuery(theme.breakpoints.up('md')) + const isAuthenticated = useSelector((state) => state.auth.isAuthenticated) const drawer = ( <> @@ -36,16 +41,18 @@ const Sidebar = ({ drawerOpen, drawerToggle, window }) => { component='div' style={{ height: !matchUpMd ? 
'calc(100vh - 56px)' : `calc(100vh - ${headerHeight}px)`, - paddingLeft: '16px', - paddingRight: '16px' + display: 'flex', + flexDirection: 'column' }} > + + @@ -62,30 +69,31 @@ const Sidebar = ({ drawerOpen, drawerToggle, window }) => { }} aria-label='mailbox folders' > - - {drawer} - + {isAuthenticated && ( + + {drawer} + + )} ) } diff --git a/packages/ui/src/layout/MainLayout/index.jsx b/packages/ui/src/layout/MainLayout/index.jsx index 236d27c97..cab73c15a 100644 --- a/packages/ui/src/layout/MainLayout/index.jsx +++ b/packages/ui/src/layout/MainLayout/index.jsx @@ -86,7 +86,7 @@ const MainLayout = () => { transition: leftDrawerOpened ? theme.transitions.create('width') : 'none' }} > - +
diff --git a/packages/ui/src/menu-items/agentsettings.js b/packages/ui/src/menu-items/agentsettings.js index 741ce0322..7e7eb8dae 100644 --- a/packages/ui/src/menu-items/agentsettings.js +++ b/packages/ui/src/menu-items/agentsettings.js @@ -50,42 +50,48 @@ const agent_settings = { title: 'Configuration', type: 'item', url: '', - icon: icons.IconAdjustmentsHorizontal + icon: icons.IconAdjustmentsHorizontal, + permission: 'agentflows:config' }, { id: 'saveAsTemplate', title: 'Save As Template', type: 'item', url: '', - icon: icons.IconTemplate + icon: icons.IconTemplate, + permission: 'templates:flowexport' }, { id: 'duplicateChatflow', title: 'Duplicate Agents', type: 'item', url: '', - icon: icons.IconCopy + icon: icons.IconCopy, + permission: 'agentflows:duplicate' }, { id: 'loadChatflow', title: 'Load Agents', type: 'item', url: '', - icon: icons.IconFileUpload + icon: icons.IconFileUpload, + permission: 'agentflows:import' }, { id: 'exportChatflow', title: 'Export Agents', type: 'item', url: '', - icon: icons.IconFileExport + icon: icons.IconFileExport, + permission: 'agentflows:export' }, { id: 'deleteChatflow', title: 'Delete Agents', type: 'item', url: '', - icon: icons.IconTrash + icon: icons.IconTrash, + permission: 'agentflows:delete' } ] } diff --git a/packages/ui/src/menu-items/customassistant.js b/packages/ui/src/menu-items/customassistant.js index 328b4adfc..dba00fbd4 100644 --- a/packages/ui/src/menu-items/customassistant.js +++ b/packages/ui/src/menu-items/customassistant.js @@ -35,14 +35,16 @@ const customAssistantSettings = { title: 'Configuration', type: 'item', url: '', - icon: icons.IconAdjustmentsHorizontal + icon: icons.IconAdjustmentsHorizontal, + permission: 'assistants:update' }, { id: 'deleteAssistant', title: 'Delete Assistant', type: 'item', url: '', - icon: icons.IconTrash + icon: icons.IconTrash, + permission: 'assistants:delete' } ] } diff --git a/packages/ui/src/menu-items/dashboard.js b/packages/ui/src/menu-items/dashboard.js index 
c9af49863..a320d0ca3 100644 --- a/packages/ui/src/menu-items/dashboard.js +++ b/packages/ui/src/menu-items/dashboard.js @@ -1,5 +1,6 @@ // assets import { + IconList, IconUsersGroup, IconHierarchy, IconBuildingStore, @@ -7,23 +8,50 @@ import { IconTool, IconLock, IconRobot, + IconSettings, IconVariable, IconFiles, + IconTestPipe, + IconMicroscope, + IconDatabase, + IconChartHistogram, + IconUserEdit, + IconFileUpload, + IconClipboardList, + IconStack2, + IconUsers, + IconLockCheck, + IconFileDatabase, + IconShieldLock, IconListCheck } from '@tabler/icons-react' // constant const icons = { - IconListCheck, - IconUsersGroup, IconHierarchy, + IconUsersGroup, IconBuildingStore, + IconList, IconKey, IconTool, IconLock, IconRobot, + IconSettings, IconVariable, - IconFiles + IconFiles, + IconTestPipe, + IconMicroscope, + IconDatabase, + IconUserEdit, + IconChartHistogram, + IconFileUpload, + IconClipboardList, + IconStack2, + IconUsers, + IconLockCheck, + IconFileDatabase, + IconShieldLock, + IconListCheck } // ==============================|| DASHBOARD MENU ITEMS ||============================== // @@ -34,84 +62,230 @@ const dashboard = { type: 'group', children: [ { - id: 'chatflows', - title: 'Chatflows', - type: 'item', - url: '/chatflows', - icon: icons.IconHierarchy, - breadcrumbs: true + id: 'primary', + title: '', + type: 'group', + children: [ + { + id: 'chatflows', + title: 'Chatflows', + type: 'item', + url: '/chatflows', + icon: icons.IconHierarchy, + breadcrumbs: true, + permission: 'chatflows:view' + }, + { + id: 'agentflows', + title: 'Agentflows', + type: 'item', + url: '/agentflows', + icon: icons.IconUsersGroup, + breadcrumbs: true, + permission: 'agentflows:view' + }, + { + id: 'executions', + title: 'Executions', + type: 'item', + url: '/executions', + icon: icons.IconListCheck, + breadcrumbs: true, + permission: 'executions:view' + }, + { + id: 'assistants', + title: 'Assistants', + type: 'item', + url: '/assistants', + icon: icons.IconRobot, + 
breadcrumbs: true, + permission: 'assistants:view' + }, + { + id: 'marketplaces', + title: 'Marketplaces', + type: 'item', + url: '/marketplaces', + icon: icons.IconBuildingStore, + breadcrumbs: true, + permission: 'templates:marketplace,templates:custom' + }, + { + id: 'tools', + title: 'Tools', + type: 'item', + url: '/tools', + icon: icons.IconTool, + breadcrumbs: true, + permission: 'tools:view' + }, + { + id: 'credentials', + title: 'Credentials', + type: 'item', + url: '/credentials', + icon: icons.IconLock, + breadcrumbs: true, + permission: 'credentials:view' + }, + { + id: 'variables', + title: 'Variables', + type: 'item', + url: '/variables', + icon: icons.IconVariable, + breadcrumbs: true, + permission: 'variables:view' + }, + { + id: 'apikey', + title: 'API Keys', + type: 'item', + url: '/apikey', + icon: icons.IconKey, + breadcrumbs: true, + permission: 'apikeys:view' + }, + { + id: 'document-stores', + title: 'Document Stores', + type: 'item', + url: '/document-stores', + icon: icons.IconFiles, + breadcrumbs: true, + permission: 'documentStores:view' + } + ] }, { - id: 'agentflows', - title: 'Agentflows', - type: 'item', - url: '/agentflows', - icon: icons.IconUsersGroup, - breadcrumbs: true + id: 'evaluations', + title: 'Evaluations', + type: 'group', + children: [ + { + id: 'datasets', + title: 'Datasets', + type: 'item', + url: '/datasets', + icon: icons.IconDatabase, + breadcrumbs: true, + display: 'feat:datasets', + permission: 'datasets:view' + }, + { + id: 'evaluators', + title: 'Evaluators', + type: 'item', + url: '/evaluators', + icon: icons.IconTestPipe, + breadcrumbs: true, + display: 'feat:evaluators', + permission: 'evaluators:view' + }, + { + id: 'evaluations', + title: 'Evaluations', + type: 'item', + url: '/evaluations', + icon: icons.IconChartHistogram, + breadcrumbs: true, + display: 'feat:evaluations', + permission: 'evaluations:view' + } + ] }, { - id: 'executions', - title: 'Executions', - type: 'item', - url: '/executions', - 
icon: icons.IconListCheck, - breadcrumbs: true + id: 'management', + title: 'User & Workspace Management', + type: 'group', + children: [ + { + id: 'sso', + title: 'SSO Config', + type: 'item', + url: '/sso-config', + icon: icons.IconShieldLock, + breadcrumbs: true, + display: 'feat:sso-config', + permission: 'sso:manage' + }, + { + id: 'roles', + title: 'Roles', + type: 'item', + url: '/roles', + icon: icons.IconLockCheck, + breadcrumbs: true, + display: 'feat:roles', + permission: 'roles:manage' + }, + { + id: 'users', + title: 'Users', + type: 'item', + url: '/users', + icon: icons.IconUsers, + breadcrumbs: true, + display: 'feat:users', + permission: 'users:manage' + }, + { + id: 'workspaces', + title: 'Workspaces', + type: 'item', + url: '/workspaces', + icon: icons.IconStack2, + breadcrumbs: true, + display: 'feat:workspaces', + permission: 'workspace:view' + }, + { + id: 'login-activity', + title: 'Login Activity', + type: 'item', + url: '/login-activity', + icon: icons.IconClipboardList, + breadcrumbs: true, + display: 'feat:login-activity', + permission: 'loginActivity:view' + } + ] }, { - id: 'assistants', - title: 'Assistants', - type: 'item', - url: '/assistants', - icon: icons.IconRobot, - breadcrumbs: true - }, - { - id: 'marketplaces', - title: 'Marketplaces', - type: 'item', - url: '/marketplaces', - icon: icons.IconBuildingStore, - breadcrumbs: true - }, - { - id: 'tools', - title: 'Tools', - type: 'item', - url: '/tools', - icon: icons.IconTool, - breadcrumbs: true - }, - { - id: 'credentials', - title: 'Credentials', - type: 'item', - url: '/credentials', - icon: icons.IconLock, - breadcrumbs: true - }, - { - id: 'variables', - title: 'Variables', - type: 'item', - url: '/variables', - icon: icons.IconVariable, - breadcrumbs: true - }, - { - id: 'apikey', - title: 'API Keys', - type: 'item', - url: '/apikey', - icon: icons.IconKey, - breadcrumbs: true - }, - { - id: 'document-stores', - title: 'Document Stores', - type: 'item', - url: 
'/document-stores', - icon: icons.IconFiles, - breadcrumbs: true + id: 'others', + title: 'Others', + type: 'group', + children: [ + { + id: 'logs', + title: 'Logs', + type: 'item', + url: '/logs', + icon: icons.IconList, + breadcrumbs: true, + display: 'feat:logs', + permission: 'logs:view' + }, + // { + // id: 'files', + // title: 'Files', + // type: 'item', + // url: '/files', + // icon: icons.IconFileDatabase, + // breadcrumbs: true, + // display: 'feat:files', + // }, + { + id: 'account', + title: 'Account Settings', + type: 'item', + url: '/account', + icon: icons.IconSettings, + breadcrumbs: true, + display: 'feat:account' + } + ] } ] } diff --git a/packages/ui/src/menu-items/index.js b/packages/ui/src/menu-items/index.js index bad835afb..533f2c6cf 100644 --- a/packages/ui/src/menu-items/index.js +++ b/packages/ui/src/menu-items/index.js @@ -2,8 +2,6 @@ import dashboard from './dashboard' // ==============================|| MENU ITEMS ||============================== // -const menuItems = { +export const menuItems = { items: [dashboard] } - -export default menuItems diff --git a/packages/ui/src/menu-items/settings.js b/packages/ui/src/menu-items/settings.js index 94ff397c3..01b4f5378 100644 --- a/packages/ui/src/menu-items/settings.js +++ b/packages/ui/src/menu-items/settings.js @@ -57,6 +57,7 @@ const settings = { title: 'Configuration', type: 'item', url: '', + permission: 'chatflows:config', icon: icons.IconAdjustmentsHorizontal }, { @@ -64,35 +65,40 @@ const settings = { title: 'Save As Template', type: 'item', url: '', - icon: icons.IconTemplate + icon: icons.IconTemplate, + permission: 'templates:flowexport' }, { id: 'duplicateChatflow', title: 'Duplicate Chatflow', type: 'item', url: '', - icon: icons.IconCopy + icon: icons.IconCopy, + permission: 'chatflows:duplicate' }, { id: 'loadChatflow', title: 'Load Chatflow', type: 'item', url: '', - icon: icons.IconFileUpload + icon: icons.IconFileUpload, + permission: 'chatflows:import' }, { id: 
'exportChatflow', title: 'Export Chatflow', type: 'item', url: '', - icon: icons.IconFileExport + icon: icons.IconFileExport, + permission: 'chatflows:export' }, { id: 'deleteChatflow', title: 'Delete Chatflow', type: 'item', url: '', - icon: icons.IconTrash + icon: icons.IconTrash, + permission: 'chatflows:delete' } ] } diff --git a/packages/ui/src/routes/AuthRoutes.jsx b/packages/ui/src/routes/AuthRoutes.jsx new file mode 100644 index 000000000..2d63fc387 --- /dev/null +++ b/packages/ui/src/routes/AuthRoutes.jsx @@ -0,0 +1,59 @@ +import { lazy } from 'react' + +import Loadable from '@/ui-component/loading/Loadable' +import AuthLayout from '@/layout/AuthLayout' + +const ResolveLoginPage = Loadable(lazy(() => import('@/views/auth/login'))) +const SignInPage = Loadable(lazy(() => import('@/views/auth/signIn'))) +const RegisterPage = Loadable(lazy(() => import('@/views/auth/register'))) +const VerifyEmailPage = Loadable(lazy(() => import('@/views/auth/verify-email'))) +const ForgotPasswordPage = Loadable(lazy(() => import('@/views/auth/forgotPassword'))) +const ResetPasswordPage = Loadable(lazy(() => import('@/views/auth/resetPassword'))) +const UnauthorizedPage = Loadable(lazy(() => import('@/views/auth/unauthorized'))) +const OrganizationSetupPage = Loadable(lazy(() => import('@/views/organization/index'))) +const LicenseExpiredPage = Loadable(lazy(() => import('@/views/auth/expired'))) + +const AuthRoutes = { + path: '/', + element: , + children: [ + { + path: '/login', + element: + }, + { + path: '/signin', + element: + }, + { + path: '/register', + element: + }, + { + path: '/verify', + element: + }, + { + path: '/forgot-password', + element: + }, + { + path: '/reset-password', + element: + }, + { + path: '/unauthorized', + element: + }, + { + path: '/organization-setup', + element: + }, + { + path: '/license-expired', + element: + } + ] +} + +export default AuthRoutes diff --git a/packages/ui/src/routes/CanvasRoutes.jsx b/packages/ui/src/routes/CanvasRoutes.jsx 
index ead159205..e90a96249 100644 --- a/packages/ui/src/routes/CanvasRoutes.jsx +++ b/packages/ui/src/routes/CanvasRoutes.jsx @@ -3,6 +3,7 @@ import { lazy } from 'react' // project imports import Loadable from '@/ui-component/loading/Loadable' import MinimalLayout from '@/layout/MinimalLayout' +import { RequireAuth } from '@/routes/RequireAuth' // canvas routing const Canvas = Loadable(lazy(() => import('@/views/canvas'))) @@ -18,35 +19,67 @@ const CanvasRoutes = { children: [ { path: '/canvas', - element: + element: ( + + + + ) }, { path: '/canvas/:id', - element: + element: ( + + + + ) }, { path: '/agentcanvas', - element: + element: ( + + + + ) }, { path: '/agentcanvas/:id', - element: + element: ( + + + + ) }, { path: '/v2/agentcanvas', - element: + element: ( + + + + ) }, { path: '/v2/agentcanvas/:id', - element: + element: ( + + + + ) }, { path: '/marketplace/:id', - element: + element: ( + + + + ) }, { path: '/v2/marketplace/:id', - element: + element: ( + + + + ) } ] } diff --git a/packages/ui/src/routes/DefaultRedirect.jsx b/packages/ui/src/routes/DefaultRedirect.jsx new file mode 100644 index 000000000..6af3d1069 --- /dev/null +++ b/packages/ui/src/routes/DefaultRedirect.jsx @@ -0,0 +1,100 @@ +import { useAuth } from '@/hooks/useAuth' +import { useConfig } from '@/store/context/ConfigContext' +import { useSelector } from 'react-redux' + +// Import all view components +import Account from '@/views/account' +import Executions from '@/views/agentexecutions' +import Agentflows from '@/views/agentflows' +import APIKey from '@/views/apikey' +import Assistants from '@/views/assistants' +import Login from '@/views/auth/login' +import LoginActivityPage from '@/views/auth/loginActivity' +import SSOConfig from '@/views/auth/ssoConfig' +import Unauthorized from '@/views/auth/unauthorized' +import Chatflows from '@/views/chatflows' +import Credentials from '@/views/credentials' +import EvalDatasets from '@/views/datasets' +import Documents from '@/views/docstore' 
+import EvalEvaluation from '@/views/evaluations/index' +import Evaluators from '@/views/evaluators' +import Marketplaces from '@/views/marketplaces' +import RolesPage from '@/views/roles' +import Logs from '@/views/serverlogs' +import Tools from '@/views/tools' +import UsersPage from '@/views/users' +import Variables from '@/views/variables' +import Workspaces from '@/views/workspace' + +/** + * Component that redirects users to the first accessible page based on their permissions + * This prevents 403 errors when users don't have access to the default chatflows page + */ +export const DefaultRedirect = () => { + const { hasPermission, hasDisplay } = useAuth() + const { isOpenSource } = useConfig() + const isGlobal = useSelector((state) => state.auth.isGlobal) + const isAuthenticated = useSelector((state) => state.auth.isAuthenticated) + + // Define the order of routes to check (based on the menu order in dashboard.js) + const routesToCheck = [ + { component: Chatflows, permission: 'chatflows:view' }, + { component: Agentflows, permission: 'agentflows:view' }, + { component: Executions, permission: 'executions:view' }, + { component: Assistants, permission: 'assistants:view' }, + { component: Marketplaces, permission: 'templates:marketplace,templates:custom' }, + { component: Tools, permission: 'tools:view' }, + { component: Credentials, permission: 'credentials:view' }, + { component: Variables, permission: 'variables:view' }, + { component: APIKey, permission: 'apikeys:view' }, + { component: Documents, permission: 'documentStores:view' }, + // Evaluation routes (with display flags) + { component: EvalDatasets, permission: 'datasets:view', display: 'feat:datasets' }, + { component: Evaluators, permission: 'evaluators:view', display: 'feat:evaluators' }, + { component: EvalEvaluation, permission: 'evaluations:view', display: 'feat:evaluations' }, + // Management routes (with display flags) + { component: SSOConfig, permission: 'sso:manage', display: 
'feat:sso-config' }, + { component: RolesPage, permission: 'roles:manage', display: 'feat:roles' }, + { component: UsersPage, permission: 'users:manage', display: 'feat:users' }, + { component: Workspaces, permission: 'workspace:view', display: 'feat:workspaces' }, + { component: LoginActivityPage, permission: 'loginActivity:view', display: 'feat:login-activity' }, + // Other routes + { component: Logs, permission: 'logs:view', display: 'feat:logs' }, + { component: Account, display: 'feat:account' } + ] + + // If user is not authenticated, show login page + if (!isAuthenticated) { + return + } + + // For open source, show chatflows (no permission checks) + if (isOpenSource) { + return + } + + // For global admins, show chatflows (they have access to everything) + if (isGlobal) { + return + } + + // Check each route in order and return the first accessible component + for (const route of routesToCheck) { + const { component: Component, permission, display } = route + + // Check permission if specified + const hasRequiredPermission = !permission || hasPermission(permission) + + // Check display flag if specified + const hasRequiredDisplay = !display || hasDisplay(display) + + // If user has both required permission and display access, return this component + if (hasRequiredPermission && hasRequiredDisplay) { + return + } + } + + // If no accessible routes found, show unauthorized page + // This should rarely happen as most users should have at least one permission + return +} diff --git a/packages/ui/src/routes/MainRoutes.jsx b/packages/ui/src/routes/MainRoutes.jsx index f50873de3..544433db6 100644 --- a/packages/ui/src/routes/MainRoutes.jsx +++ b/packages/ui/src/routes/MainRoutes.jsx @@ -4,6 +4,9 @@ import { lazy } from 'react' import MainLayout from '@/layout/MainLayout' import Loadable from '@/ui-component/loading/Loadable' +import { RequireAuth } from '@/routes/RequireAuth' +import { DefaultRedirect } from '@/routes/DefaultRedirect' + // chatflows routing const 
Chatflows = Loadable(lazy(() => import('@/views/chatflows'))) @@ -39,9 +42,34 @@ const LoaderConfigPreviewChunks = Loadable(lazy(() => import('@/views/docstore/L const VectorStoreConfigure = Loadable(lazy(() => import('@/views/docstore/VectorStoreConfigure'))) const VectorStoreQuery = Loadable(lazy(() => import('@/views/docstore/VectorStoreQuery'))) -// execution routing +// Evaluations routing +const EvalEvaluation = Loadable(lazy(() => import('@/views/evaluations/index'))) +const EvaluationResult = Loadable(lazy(() => import('@/views/evaluations/EvaluationResult'))) +const EvalDatasetRows = Loadable(lazy(() => import('@/views/datasets/DatasetItems'))) +const EvalDatasets = Loadable(lazy(() => import('@/views/datasets'))) +const Evaluators = Loadable(lazy(() => import('@/views/evaluators'))) + +// account routing +const Account = Loadable(lazy(() => import('@/views/account'))) + +// files routing +const Files = Loadable(lazy(() => import('@/views/files'))) + +// logs routing +const Logs = Loadable(lazy(() => import('@/views/serverlogs'))) + +// executions routing const Executions = Loadable(lazy(() => import('@/views/agentexecutions'))) +// enterprise features +const UsersPage = Loadable(lazy(() => import('@/views/users'))) +const RolesPage = Loadable(lazy(() => import('@/views/roles'))) +const LoginActivityPage = Loadable(lazy(() => import('@/views/auth/loginActivity'))) +const Workspaces = Loadable(lazy(() => import('@/views/workspace'))) +const WorkspaceDetails = Loadable(lazy(() => import('@/views/workspace/WorkspaceUsers'))) +const SSOConfig = Loadable(lazy(() => import('@/views/auth/ssoConfig'))) +const SSOSuccess = Loadable(lazy(() => import('@/views/auth/ssoSuccess'))) + // ==============================|| MAIN ROUTING ||============================== // const MainRoutes = { @@ -50,83 +78,271 @@ const MainRoutes = { children: [ { path: '/', - element: + element: }, { path: '/chatflows', - element: + element: ( + + + + ) }, { path: '/agentflows', - element: 
+ element: ( + + + + ) }, { path: '/executions', - element: + element: ( + + + + ) }, { path: '/marketplaces', - element: + element: ( + + + + ) }, { path: '/apikey', - element: + element: ( + + + + ) }, { path: '/tools', - element: + element: ( + + + + ) }, { path: '/assistants', - element: + element: ( + + + + ) }, { path: '/assistants/custom', - element: + element: ( + + + + ) }, { path: '/assistants/custom/:id', - element: + element: ( + + + + ) }, { path: '/assistants/openai', - element: + element: ( + + + + ) }, { path: '/credentials', - element: + element: ( + + + + ) }, { path: '/variables', - element: + element: ( + + + + ) }, { path: '/document-stores', - element: + element: ( + + + + ) }, { path: '/document-stores/:storeId', - element: + element: ( + + + + ) }, { path: '/document-stores/chunks/:storeId/:fileId', - element: + element: ( + + + + ) }, { path: '/document-stores/:storeId/:name', - element: + element: ( + + + + ) }, { path: '/document-stores/vector/:storeId', - element: + element: ( + + + + ) }, { path: '/document-stores/vector/:storeId/:docId', - element: + element: ( + + + + ) }, { path: '/document-stores/query/:storeId', - element: + element: ( + + + + ) + }, + { + path: '/datasets', + element: ( + + + + ) + }, + { + path: '/dataset_rows/:id', + element: ( + + + + ) + }, + { + path: '/evaluations', + element: ( + + + + ) + }, + { + path: '/evaluation_results/:id', + element: ( + + + + ) + }, + { + path: '/evaluators', + element: ( + + + + ) + }, + { + path: '/logs', + element: ( + + + + ) + }, + { + path: '/files', + element: ( + + + + ) + }, + { + path: '/account', + element: + }, + { + path: '/users', + element: ( + + + + ) + }, + { + path: '/roles', + element: ( + + + + ) + }, + { + path: '/login-activity', + element: ( + + + + ) + }, + { + path: '/workspaces', + element: ( + + + + ) + }, + { + path: '/workspace-users/:id', + element: ( + + + + ) + }, + { + path: '/sso-config', + element: ( + + + + ) + }, + { + path: '/sso-success', + 
element: } ] } diff --git a/packages/ui/src/routes/RequireAuth.jsx b/packages/ui/src/routes/RequireAuth.jsx new file mode 100644 index 000000000..b02c1ef7a --- /dev/null +++ b/packages/ui/src/routes/RequireAuth.jsx @@ -0,0 +1,94 @@ +import { useAuth } from '@/hooks/useAuth' +import { useConfig } from '@/store/context/ConfigContext' +import PropTypes from 'prop-types' +import { useSelector } from 'react-redux' +import { Navigate } from 'react-router' +import { useLocation } from 'react-router-dom' + +/** + * Checks if a feature flag is enabled + * @param {Object} features - Feature flags object + * @param {string} display - Feature flag key to check + * @param {React.ReactElement} children - Components to render if feature is enabled + * @returns {React.ReactElement} Children or unauthorized redirect + */ +const checkFeatureFlag = (features, display, children) => { + // Validate features object exists and is properly formatted + if (!features || Array.isArray(features) || Object.keys(features).length === 0) { + return + } + + // Check if feature flag exists and is enabled + if (Object.hasOwnProperty.call(features, display)) { + const isFeatureEnabled = features[display] === 'true' || features[display] === true + return isFeatureEnabled ? 
children : + } + + return +} + +export const RequireAuth = ({ permission, display, children }) => { + const location = useLocation() + const { isCloud, isOpenSource, isEnterpriseLicensed, loading } = useConfig() + const { hasPermission } = useAuth() + const isGlobal = useSelector((state) => state.auth.isGlobal) + const currentUser = useSelector((state) => state.auth.user) + const features = useSelector((state) => state.auth.features) + const permissions = useSelector((state) => state.auth.permissions) + + // Step 0: Wait for config to load + if (loading) { + return null + } + + // Step 1: Authentication Check + // Redirect to login if user is not authenticated + if (!currentUser) { + return + } + + // Step 2: Deployment Type Specific Logic + // Open Source: Only show features without display property + if (isOpenSource) { + return !display ? children : + } + + // Cloud & Enterprise: Check both permissions and feature flags + if (isCloud || isEnterpriseLicensed) { + // Routes with display property - check feature flags + if (display) { + // Check if user has any permissions + if (permissions.length === 0) { + return + } + + // Organization admins bypass permission checks + if (isGlobal) { + return checkFeatureFlag(features, display, children) + } + + // Check user permissions and feature flags + if (!permission || hasPermission(permission)) { + return checkFeatureFlag(features, display, children) + } + + return + } + + // Standard routes: check permissions (global admins bypass) + if (permission && !hasPermission(permission) && !isGlobal) { + return + } + + return children + } + + // Fallback: If none of the platform types match, deny access + return +} + +RequireAuth.propTypes = { + permission: PropTypes.string, + display: PropTypes.string, + children: PropTypes.element +} diff --git a/packages/ui/src/routes/index.jsx b/packages/ui/src/routes/index.jsx index 3d40f2f9a..7a5d76f1b 100644 --- a/packages/ui/src/routes/index.jsx +++ b/packages/ui/src/routes/index.jsx @@ 
-4,11 +4,12 @@ import { useRoutes } from 'react-router-dom' import MainRoutes from './MainRoutes' import CanvasRoutes from './CanvasRoutes' import ChatbotRoutes from './ChatbotRoutes' -import ExecutionRoutes from './ExecutionRoutes' import config from '@/config' +import AuthRoutes from '@/routes/AuthRoutes' +import ExecutionRoutes from './ExecutionRoutes' // ==============================|| ROUTING RENDER ||============================== // export default function ThemeRoutes() { - return useRoutes([MainRoutes, CanvasRoutes, ChatbotRoutes, ExecutionRoutes], config.basename) + return useRoutes([MainRoutes, AuthRoutes, CanvasRoutes, ChatbotRoutes, ExecutionRoutes], config.basename) } diff --git a/packages/ui/src/store/actions.js b/packages/ui/src/store/actions.js index 0c68f8f23..c6ace4a64 100644 --- a/packages/ui/src/store/actions.js +++ b/packages/ui/src/store/actions.js @@ -32,6 +32,11 @@ export const enqueueSnackbar = (notification) => { type: ENQUEUE_SNACKBAR, notification: { ...notification, + options: { + ...notification.options, + persist: notification.options?.persist ?? false, // Default: auto-close enabled + autoHideDuration: notification.options?.autoHideDuration ?? 
5000 // Default auto-close duration: 5 seconds + }, key: key || new Date().getTime() + Math.random() } } diff --git a/packages/ui/src/store/constant.js b/packages/ui/src/store/constant.js index 15a468955..627959fa1 100644 --- a/packages/ui/src/store/constant.js +++ b/packages/ui/src/store/constant.js @@ -26,6 +26,17 @@ export const baseURL = import.meta.env.VITE_API_BASE_URL || window.location.orig export const uiBaseURL = import.meta.env.VITE_UI_BASE_URL || window.location.origin export const FLOWISE_CREDENTIAL_ID = 'FLOWISE_CREDENTIAL_ID' export const REDACTED_CREDENTIAL_VALUE = '_FLOWISE_BLANK_07167752-1a71-43b1-bf8f-4f32252165db' +export const ErrorMessage = { + INVALID_MISSING_TOKEN: 'Invalid or Missing token', + TOKEN_EXPIRED: 'Token Expired', + REFRESH_TOKEN_EXPIRED: 'Refresh Token Expired', + FORBIDDEN: 'Forbidden', + UNKNOWN_USER: 'Unknown Username or Password', + INCORRECT_PASSWORD: 'Incorrect Password', + INACTIVE_USER: 'Inactive User', + INVALID_WORKSPACE: 'No Workspace Assigned', + UNKNOWN_ERROR: 'Unknown Error' +} export const AGENTFLOW_ICONS = [ { name: 'conditionAgentflow', diff --git a/packages/ui/src/store/context/ConfigContext.jsx b/packages/ui/src/store/context/ConfigContext.jsx new file mode 100644 index 000000000..a11ef2817 --- /dev/null +++ b/packages/ui/src/store/context/ConfigContext.jsx @@ -0,0 +1,55 @@ +import platformsettingsApi from '@/api/platformsettings' +import PropTypes from 'prop-types' +import { createContext, useContext, useEffect, useState } from 'react' + +const ConfigContext = createContext() + +export const ConfigProvider = ({ children }) => { + const [config, setConfig] = useState({}) + const [loading, setLoading] = useState(true) + const [isEnterpriseLicensed, setEnterpriseLicensed] = useState(false) + const [isCloud, setCloudLicensed] = useState(false) + const [isOpenSource, setOpenSource] = useState(false) + + useEffect(() => { + const userSettings = platformsettingsApi.getSettings() + Promise.all([userSettings]) + 
.then(([currentSettingsData]) => { + const finalData = { + ...currentSettingsData.data + } + setConfig(finalData) + if (finalData.PLATFORM_TYPE) { + if (finalData.PLATFORM_TYPE === 'enterprise') { + setEnterpriseLicensed(true) + setCloudLicensed(false) + setOpenSource(false) + } else if (finalData.PLATFORM_TYPE === 'cloud') { + setCloudLicensed(true) + setEnterpriseLicensed(false) + setOpenSource(false) + } else { + setOpenSource(true) + setEnterpriseLicensed(false) + setCloudLicensed(false) + } + } + + setLoading(false) + }) + .catch((error) => { + console.error('Error fetching data:', error) + setLoading(false) + }) + }, []) + + return ( + {children} + ) +} + +export const useConfig = () => useContext(ConfigContext) + +ConfigProvider.propTypes = { + children: PropTypes.any +} diff --git a/packages/ui/src/store/context/ErrorContext.jsx b/packages/ui/src/store/context/ErrorContext.jsx new file mode 100644 index 000000000..e41070a15 --- /dev/null +++ b/packages/ui/src/store/context/ErrorContext.jsx @@ -0,0 +1,59 @@ +import { createContext, useContext, useState } from 'react' +import { redirectWhenUnauthorized } from '@/utils/genericHelper' +import PropTypes from 'prop-types' +import { useNavigate } from 'react-router-dom' +import { store } from '@/store' +import { logoutSuccess } from '@/store/reducers/authSlice' +import { ErrorMessage } from '../constant' + +const ErrorContext = createContext() + +export const ErrorProvider = ({ children }) => { + const [error, setError] = useState(null) + const navigate = useNavigate() + + const handleError = async (err) => { + console.error(err) + if (err?.response?.status === 403) { + navigate('/unauthorized') + } else if (err?.response?.status === 401) { + if (ErrorMessage.INVALID_MISSING_TOKEN === err?.response?.data?.message) { + store.dispatch(logoutSuccess()) + navigate('/login') + } else { + const isRedirect = err?.response?.data?.redirectTo && err?.response?.data?.error + + if (isRedirect) { + redirectWhenUnauthorized({ + 
error: err.response.data.error, + redirectTo: err.response.data.redirectTo + }) + } else { + const currentPath = window.location.pathname + if (currentPath !== '/signin' && currentPath !== '/login') { + store.dispatch(logoutSuccess()) + navigate('/login') + } + } + } + } else setError(err) + } + + return ( + + {children} + + ) +} + +export const useError = () => useContext(ErrorContext) + +ErrorProvider.propTypes = { + children: PropTypes.any +} diff --git a/packages/ui/src/store/reducer.jsx b/packages/ui/src/store/reducer.jsx index b464e5719..a9bb1d8f1 100644 --- a/packages/ui/src/store/reducer.jsx +++ b/packages/ui/src/store/reducer.jsx @@ -5,6 +5,7 @@ import customizationReducer from './reducers/customizationReducer' import canvasReducer from './reducers/canvasReducer' import notifierReducer from './reducers/notifierReducer' import dialogReducer from './reducers/dialogReducer' +import authReducer from './reducers/authSlice' // ==============================|| COMBINE REDUCER ||============================== // @@ -12,7 +13,8 @@ const reducer = combineReducers({ customization: customizationReducer, canvas: canvasReducer, notifier: notifierReducer, - dialog: dialogReducer + dialog: dialogReducer, + auth: authReducer }) export default reducer diff --git a/packages/ui/src/store/reducers/authSlice.js b/packages/ui/src/store/reducers/authSlice.js new file mode 100644 index 000000000..6397949cf --- /dev/null +++ b/packages/ui/src/store/reducers/authSlice.js @@ -0,0 +1,68 @@ +// authSlice.js +import { createSlice } from '@reduxjs/toolkit' +import AuthUtils from '@/utils/authUtils' + +const initialState = { + user: localStorage.getItem('user') ? JSON.parse(localStorage.getItem('user')) : null, + isAuthenticated: 'true' === localStorage.getItem('isAuthenticated'), + isGlobal: 'true' === localStorage.getItem('isGlobal'), + token: null, + permissions: + localStorage.getItem('permissions') && localStorage.getItem('permissions') !== 'undefined' + ? 
JSON.parse(localStorage.getItem('permissions')) + : null, + features: + localStorage.getItem('features') && localStorage.getItem('features') !== 'undefined' + ? JSON.parse(localStorage.getItem('features')) + : null +} + +const authSlice = createSlice({ + name: 'auth', + initialState, + reducers: { + loginSuccess: (state, action) => { + AuthUtils.updateStateAndLocalStorage(state, action.payload) + }, + logoutSuccess: (state) => { + state.user = null + state.token = null + state.permissions = null + state.features = null + state.isAuthenticated = false + state.isGlobal = false + AuthUtils.removeCurrentUser() + }, + workspaceSwitchSuccess: (state, action) => { + AuthUtils.updateStateAndLocalStorage(state, action.payload) + }, + upgradePlanSuccess: (state, action) => { + AuthUtils.updateStateAndLocalStorage(state, action.payload) + }, + userProfileUpdated: (state, action) => { + const user = AuthUtils.extractUser(action.payload) + state.user.name = user.name + state.user.email = user.email + AuthUtils.updateCurrentUser(state.user) + }, + workspaceNameUpdated: (state, action) => { + const updatedWorkspace = action.payload + // find the matching assignedWorkspace and update it + const assignedWorkspaces = state.user.assignedWorkspaces.map((workspace) => { + if (workspace.id === updatedWorkspace.id) { + return { + ...workspace, + name: updatedWorkspace.name + } + } + return workspace + }) + state.user.assignedWorkspaces = assignedWorkspaces + AuthUtils.updateCurrentUser(state.user) + } + } +}) + +export const { loginSuccess, logoutSuccess, workspaceSwitchSuccess, upgradePlanSuccess, userProfileUpdated, workspaceNameUpdated } = + authSlice.actions +export default authSlice.reducer diff --git a/packages/ui/src/ui-component/array/ArrayRenderer.jsx b/packages/ui/src/ui-component/array/ArrayRenderer.jsx index c38499547..79a5d0274 100644 --- a/packages/ui/src/ui-component/array/ArrayRenderer.jsx +++ b/packages/ui/src/ui-component/array/ArrayRenderer.jsx @@ -5,16 +5,18 @@ import 
{ Chip, Box, Button, IconButton } from '@mui/material' import { useTheme } from '@mui/material/styles' import { IconTrash, IconPlus } from '@tabler/icons-react' import NodeInputHandler from '@/views/canvas/NodeInputHandler' +import DocStoreInputHandler from '@/views/docstore/DocStoreInputHandler' import { showHideInputs } from '@/utils/genericHelper' import { cloneDeep } from 'lodash' import { flowContext } from '@/store/context/ReactFlowContext' -export const ArrayRenderer = ({ inputParam, data, disabled }) => { +export const ArrayRenderer = ({ inputParam, data, disabled, isDocStore = false }) => { const [arrayItems, setArrayItems] = useState([]) // these are the actual values. Ex: [{name: 'John', age: 30}, {name: 'Jane', age: 25}] const [itemParameters, setItemParameters] = useState([]) // these are the input parameters for each array item. Ex: [{label: 'Name', type: 'string', display: true}, {label: 'age', type: 'number', display: false}] const theme = useTheme() const customization = useSelector((state) => state.customization) - const { reactFlowInstance } = useContext(flowContext) + const flowContextValue = useContext(flowContext) + const { reactFlowInstance } = flowContextValue || {} // Handler for when input values change within array items const handleItemInputChange = ({ inputParam: changedParam, newValue }, itemIndex) => { @@ -70,6 +72,9 @@ export const ArrayRenderer = ({ inputParam, data, disabled }) => { }, [data, inputParam]) const updateOutputAnchors = (items, type, indexToDelete) => { + // Skip output anchor updates for DocStore context + if (isDocStore || !reactFlowInstance) return + if (data.name !== 'conditionAgentflow' && data.name !== 'conditionAgentAgentflow') return const updatedOutputs = items.map((_, i) => ({ @@ -221,20 +226,35 @@ export const ArrayRenderer = ({ inputParam, data, disabled }) => { {/* Render input fields for array item */} {itemParameters[index] .filter((param) => param.display !== false) - .map((param, _index) => ( - { - 
handleItemInputChange({ inputParam, newValue }, index) - }} - /> - ))} + .map((param, _index) => { + if (isDocStore) { + return ( + { + handleItemInputChange({ inputParam, newValue }, index) + }} + /> + ) + } + return ( + { + handleItemInputChange({ inputParam, newValue }, index) + }} + /> + ) + })} ) })} @@ -257,5 +277,6 @@ export const ArrayRenderer = ({ inputParam, data, disabled }) => { ArrayRenderer.propTypes = { inputParam: PropTypes.object.isRequired, data: PropTypes.object.isRequired, - disabled: PropTypes.bool + disabled: PropTypes.bool, + isDocStore: PropTypes.bool } diff --git a/packages/ui/src/ui-component/button/FlowListMenu.jsx b/packages/ui/src/ui-component/button/FlowListMenu.jsx index 35b644da7..9ac85206f 100644 --- a/packages/ui/src/ui-component/button/FlowListMenu.jsx +++ b/packages/ui/src/ui-component/button/FlowListMenu.jsx @@ -4,7 +4,7 @@ import PropTypes from 'prop-types' import { styled, alpha } from '@mui/material/styles' import Menu from '@mui/material/Menu' -import MenuItem from '@mui/material/MenuItem' +import { PermissionMenuItem } from '@/ui-component/button/RBACButtons' import EditIcon from '@mui/icons-material/Edit' import Divider from '@mui/material/Divider' import FileCopyIcon from '@mui/icons-material/FileCopy' @@ -74,7 +74,7 @@ const StyledMenu = styled((props) => ( } })) -export default function FlowListMenu({ chatflow, isAgentCanvas, setError, updateFlowsApi }) { +export default function FlowListMenu({ chatflow, isAgentCanvas, isAgentflowV2, setError, updateFlowsApi, currentPage, pageLimit }) { const { confirm } = useConfirm() const dispatch = useDispatch() const updateChatflowApi = useApi(chatflowsApi.updateChatflow) @@ -166,10 +166,16 @@ export default function FlowListMenu({ chatflow, isAgentCanvas, setError, update } try { await updateChatflowApi.request(chatflow.id, updateBody) - if (isAgentCanvas && localStorage.getItem('agentFlowVersion') === 'v2') { - await updateFlowsApi.request('AGENTFLOW') + const params = { + page: 
currentPage, + limit: pageLimit + } + if (isAgentCanvas && isAgentflowV2) { + await updateFlowsApi.request('AGENTFLOW', params) + } else if (isAgentCanvas) { + await updateFlowsApi.request('MULTIAGENT', params) } else { - await updateFlowsApi.request(isAgentCanvas ? 'MULTIAGENT' : undefined) + await updateFlowsApi.request(params) } } catch (error) { if (setError) setError(error) @@ -209,7 +215,15 @@ export default function FlowListMenu({ chatflow, isAgentCanvas, setError, update } try { await updateChatflowApi.request(chatflow.id, updateBody) - await updateFlowsApi.request(isAgentCanvas ? 'AGENTFLOW' : undefined) + const params = { + page: currentPage, + limit: pageLimit + } + if (isAgentCanvas) { + await updateFlowsApi.request('AGENTFLOW', params) + } else { + await updateFlowsApi.request(params) + } } catch (error) { if (setError) setError(error) enqueueSnackbar({ @@ -241,10 +255,16 @@ export default function FlowListMenu({ chatflow, isAgentCanvas, setError, update if (isConfirmed) { try { await chatflowsApi.deleteChatflow(chatflow.id) - if (isAgentCanvas && localStorage.getItem('agentFlowVersion') === 'v2') { - await updateFlowsApi.request('AGENTFLOW') + const params = { + page: currentPage, + limit: pageLimit + } + if (isAgentCanvas && isAgentflowV2) { + await updateFlowsApi.request('AGENTFLOW', params) + } else if (isAgentCanvas) { + await updateFlowsApi.request('MULTIAGENT', params) } else { - await updateFlowsApi.request(isAgentCanvas ? 'MULTIAGENT' : undefined) + await updateFlowsApi.request(params) } } catch (error) { if (setError) setError(error) @@ -269,7 +289,13 @@ export default function FlowListMenu({ chatflow, isAgentCanvas, setError, update setAnchorEl(null) try { localStorage.setItem('duplicatedFlowData', chatflow.flowData) - window.open(`${uiBaseURL}/${isAgentCanvas ? 
'agentcanvas' : 'canvas'}`, '_blank') + if (isAgentflowV2) { + window.open(`${uiBaseURL}/v2/agentcanvas`, '_blank') + } else if (isAgentCanvas) { + window.open(`${uiBaseURL}/agentcanvas`, '_blank') + } else { + window.open(`${uiBaseURL}/canvas`, '_blank') + } } catch (e) { console.error(e) } @@ -317,48 +343,84 @@ export default function FlowListMenu({ chatflow, isAgentCanvas, setError, update open={open} onClose={handleClose} > - + Rename - - + + Duplicate - - + + Export - - + + Save As Template - + - + Starter Prompts - - + + Chat Feedback - - + + Allowed Domains - - + + Speech To Text - - + + Update Category - + - + Delete - + { + const { hasPermission, hasDisplay } = useAuth() + + if (!hasPermission(permissionId) || !hasDisplay(display)) { + return null + } + + return +} + +export const StyledPermissionToggleButton = ({ permissionId, display, ...props }) => { + const { hasPermission, hasDisplay } = useAuth() + + if (!hasPermission(permissionId) || !hasDisplay(display)) { + return null + } + + return +} + +export const PermissionIconButton = ({ permissionId, display, ...props }) => { + const { hasPermission, hasDisplay } = useAuth() + + if (!hasPermission(permissionId) || !hasDisplay(display)) { + return null + } + + return +} + +export const PermissionButton = ({ permissionId, display, ...props }) => { + const { hasPermission, hasDisplay } = useAuth() + + if (!hasPermission(permissionId) || !hasDisplay(display)) { + return null + } + + return + ) + } + }) + return + } + } + setIsSaving(true) + try { + const responses = await Promise.all( + selectedUsers.map(async (item) => { + const saveObj = item.isNewUser + ? 
{ + user: { + email: item.email, + createdBy: currentUser.id + }, + workspace: { + id: selectedWorkspace.id + }, + role: { + id: selectedRole.id + } + } + : { + user: { + email: item.user.email, + createdBy: currentUser.id + }, + workspace: { + id: selectedWorkspace.id + }, + role: { + id: selectedRole.id + } + } + + const response = await accountApi.inviteAccount(saveObj) + return response.data + }) + ) + if (responses.length > 0) { + enqueueSnackbar({ + message: 'Users invited to workspace', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + onConfirm() // Pass the first ID or modify as needed + } else { + throw new Error('No data received from the server') + } + } catch (error) { + console.error('Error in saveInvite:', error) + enqueueSnackbar({ + message: `Failed to invite users to workspace: ${error.response?.data?.message || error.message || 'Unknown error'}`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + } finally { + setIsSaving(false) + } + } + + const validateEmail = (email) => { + return email.match( + /^(([^<>()[\]\\.,;:\s@"]+(\.[^<>()[\]\\.,;:\s@"]+)*)|(".+"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/ + ) + } + + const handleChange = (event, newValue) => { + const updatedUsers = newValue + .filter((item) => { + if (item.isNewUser) { + // For new invites, validate the email + return validateEmail(item.email) + } + return true // Keep all existing users + }) + .map((item) => { + if (item.isNewUser) { + // This is a new invite + return { + email: item.email, + isNewUser: true, + alreadyInWorkspace: false + } + } else { + const existingUser = + userSearchResults.length > 0 + ? 
userSearchResults.find((result) => result.user.email === item.user.email) + : selectedUsers.find((result) => result.user.email === item.user.email) + return { + ...existingUser, + isNewUser: false, + alreadyInWorkspace: selectedWorkspace + ? existingUser && + existingUser.workspaceNames && + existingUser.workspaceNames.some((ws) => ws.id === selectedWorkspace.id) + : false + } + } + }) + + setSelectedUsers(updatedUsers) + + // If any invalid emails were filtered out, show a notification + if (updatedUsers.length < newValue.length) { + enqueueSnackbar({ + message: 'One or more invalid emails were removed.', + options: { + key: new Date().getTime() + Math.random(), + variant: 'warning', + action: (key) => ( + + ) + } + }) + } + } + + const handleInputChange = (event, newInputValue) => { + setSearchString(newInputValue) + const searchTerm = newInputValue.toLowerCase() + const filteredUsers = allUsers.filter( + (item) => item.user.name.toLowerCase().includes(searchTerm) || item.user.email.toLowerCase().includes(searchTerm) + ) + setUserSearchResults(filteredUsers) + setAllUsers((prevResults) => { + const newResults = [...prevResults] + filteredUsers.forEach((item) => { + if (!newResults.some((result) => result.user.id === item.user.id)) { + newResults.push(item) + } + }) + return newResults + }) + } + + const userSearchFilterOptions = (options, { inputValue }) => { + const filteredOptions = options.filter((option) => option !== null && option !== undefined) ?? 
[] + + // First filter out already selected users + const selectedUserEmails = selectedUsers.filter((user) => !user.isNewUser && user.user).map((user) => user.user.email) + + const unselectedOptions = filteredOptions.filter((option) => !option.user || !selectedUserEmails.includes(option.user.email)) + + const filterByNameOrEmail = unselectedOptions.filter( + (option) => + (option.user && option.user.name && option.user.name.toLowerCase().includes(inputValue.toLowerCase())) || + (option.user && option.user.email && option.user.email.toLowerCase().includes(inputValue.toLowerCase())) + ) + + // Early email detection regex + const partialEmailRegex = /^[^\s@]+@?[^\s@]*$/ + + if (filterByNameOrEmail.length === 0 && partialEmailRegex.test(inputValue)) { + // If it looks like an email (even partially), show the invite option + const inviteEmail = inputValue.includes('@') ? inputValue : `${inputValue}@` + // Check if this email is already in the selected users list + const isAlreadySelected = selectedUsers.some( + (user) => + (user.isNewUser && user.email === inviteEmail) || (!user.isNewUser && user.user && user.user.email === inviteEmail) + ) + + if (!isAlreadySelected) { + return [{ name: `Invite ${inviteEmail}`, email: inviteEmail, isNewUser: true }] + } + } + + if (filterByNameOrEmail.length === 0) { + return [{ name: 'No results found', email: '', isNoResult: true, disabled: true }] + } + + return filterByNameOrEmail + } + + const renderUserSearchInput = (params) => ( + 0 ? '' : 'Invite users by name or email'} /> + ) + + const renderUserSearchOptions = (props, option) => { + // Custom logic to determine if an option is selected, since state.selected seems unreliable + const isOptionSelected = option.isNewUser + ? selectedUsers.some((user) => user.isNewUser && user.email === option.email) + : selectedUsers.some((user) => !user.isNewUser && user.user && user.user.email === option.user?.email) + + return ( +
  • + {option.isNoResult ? ( + + No results found + + ) : option.isNewUser ? ( + + + {option.name} + + + ) : ( + + + {option.user.name} + {option.user.email} + + {isOptionSelected ? : null} + + )} +
  • + ) + } + + const renderSelectedUsersTags = (tagValue, getTagProps) => { + return selectedUsers.map((option, index) => { + const chipProps = getTagProps({ index }) + let chipType = option.isNewUser ? 'new' : 'existing' + if (option.alreadyInWorkspace) { + chipType = 'already-in-workspace' + } + const ChipComponent = option.isNewUser ? ( + + ) : ( + + ) + + const tooltipTitle = option.alreadyInWorkspace + ? `${option.user.name || option.user.email} is already a member of this workspace and won't be invited again.` + : option.isNewUser + ? 'An invitation will be sent to this email address' + : '' + + return tooltipTitle ? ( + + {ChipComponent} + + ) : ( + ChipComponent + ) + }) + } + + const handleWorkspaceChange = (event, newWorkspace) => { + setSelectedWorkspace(newWorkspace) + setSelectedUsers((prevUsers) => + prevUsers.map((user) => ({ + ...user, + alreadyInWorkspace: newWorkspace + ? user.workspaceNames && newWorkspace && user.workspaceNames.some((ws) => ws.id === newWorkspace.id) + : false + })) + ) + } + + const handleRoleChange = (event, newRole) => { + setSelectedRole(newRole) + } + + const getWorkspaceValue = () => { + if (dialogProps.data) { + return selectedWorkspace || {} + } + return selectedWorkspace || null + } + + const getRoleValue = () => { + if (dialogProps.data && dialogProps.type === 'ADD') { + return selectedRole || {} + } + return selectedRole || null + } + + const checkDisabled = () => { + if (isSaving || selectedUsers.length === 0 || !selectedWorkspace || !selectedRole) { + return true + } + return false + } + + const checkWorkspaceDisabled = () => { + if (dialogProps.data && dialogProps.type === 'ADD') { + return Boolean(selectedWorkspace) + } else if (dialogProps.data && dialogProps.type === 'EDIT') { + return dialogProps.disableWorkspaceSelection + } + return false + } + + const component = show ? ( + + +
    + + Invite Users +
    +
    + + + + Select Users * + + option.userId} + getOptionLabel={(option) => option.email || ''} + filterOptions={userSearchFilterOptions} + onChange={handleChange} + inputValue={searchString} + onInputChange={handleInputChange} + isOptionEqualToValue={(option, value) => { + // Compare based on user.email for existing users or email for new users + if (option.isNewUser && value.isNewUser) { + return option.email === value.email + } else if (!option.isNewUser && !value.isNewUser) { + return option.user?.email === value.user?.email + } + return false + }} + renderInput={renderUserSearchInput} + renderOption={renderUserSearchOptions} + renderTags={renderSelectedUsersTags} + sx={{ mt: 1 }} + value={selectedUsers} + PopperComponent={StyledPopper} + /> + + + + + Workspace * + + option.label || ''} + onChange={handleWorkspaceChange} + options={workspaces} + renderInput={(params) => } + sx={{ mt: 0.5 }} + value={getWorkspaceValue()} + PopperComponent={StyledPopper} + /> + + + + Role to Assign * + + option.label || ''} + onChange={handleRoleChange} + options={availableRoles} + renderInput={(params) => } + sx={{ mt: 0.5 }} + value={getRoleValue()} + PopperComponent={StyledPopper} + /> + + + + + + : null} + > + {dialogProps.confirmButtonName} + + + +
    + ) : null + + return createPortal(component, portalElement) +} + +InviteUsersDialog.propTypes = { + show: PropTypes.bool, + dialogProps: PropTypes.object, + onCancel: PropTypes.func, + onConfirm: PropTypes.func +} + +export default InviteUsersDialog diff --git a/packages/ui/src/ui-component/dialog/LoginDialog.jsx b/packages/ui/src/ui-component/dialog/LoginDialog.jsx deleted file mode 100644 index fe982b3b5..000000000 --- a/packages/ui/src/ui-component/dialog/LoginDialog.jsx +++ /dev/null @@ -1,70 +0,0 @@ -import { createPortal } from 'react-dom' -import { useState } from 'react' -import PropTypes from 'prop-types' - -import { Dialog, DialogActions, DialogContent, Typography, DialogTitle } from '@mui/material' -import { StyledButton } from '@/ui-component/button/StyledButton' -import { Input } from '@/ui-component/input/Input' - -const LoginDialog = ({ show, dialogProps, onConfirm }) => { - const portalElement = document.getElementById('portal') - const usernameInput = { - label: 'Username', - name: 'username', - type: 'string', - placeholder: 'john doe' - } - const passwordInput = { - label: 'Password', - name: 'password', - type: 'password' - } - const [usernameVal, setUsernameVal] = useState('') - const [passwordVal, setPasswordVal] = useState('') - - const component = show ? ( - { - if (e.key === 'Enter') { - onConfirm(usernameVal, passwordVal) - } - }} - open={show} - fullWidth - maxWidth='xs' - aria-labelledby='alert-dialog-title' - aria-describedby='alert-dialog-description' - > - - {dialogProps.title} - - - Username - setUsernameVal(newValue)} - value={usernameVal} - showDialog={false} - /> -
    - Password - setPasswordVal(newValue)} value={passwordVal} /> -
    - - onConfirm(usernameVal, passwordVal)}> - {dialogProps.confirmButtonName} - - -
    - ) : null - - return createPortal(component, portalElement) -} - -LoginDialog.propTypes = { - show: PropTypes.bool, - dialogProps: PropTypes.object, - onConfirm: PropTypes.func -} - -export default LoginDialog diff --git a/packages/ui/src/ui-component/dialog/ManageScrapedLinksDialog.jsx b/packages/ui/src/ui-component/dialog/ManageScrapedLinksDialog.jsx index 788c1998a..55d2a00c2 100644 --- a/packages/ui/src/ui-component/dialog/ManageScrapedLinksDialog.jsx +++ b/packages/ui/src/ui-component/dialog/ManageScrapedLinksDialog.jsx @@ -1,7 +1,7 @@ import PropTypes from 'prop-types' +import { useEffect, useState } from 'react' import { createPortal } from 'react-dom' import { useDispatch } from 'react-redux' -import { useState, useEffect } from 'react' import { Box, @@ -16,11 +16,11 @@ import { Stack, Typography } from '@mui/material' -import { IconEraser, IconTrash, IconX } from '@tabler/icons-react' +import { IconEraser, IconPlus, IconTrash, IconX } from '@tabler/icons-react' import PerfectScrollbar from 'react-perfect-scrollbar' -import { BackdropLoader } from '@/ui-component/loading/BackdropLoader' import { StyledButton } from '@/ui-component/button/StyledButton' +import { BackdropLoader } from '@/ui-component/loading/BackdropLoader' import scraperApi from '@/api/scraper' @@ -29,8 +29,8 @@ import useNotifier from '@/utils/useNotifier' import { HIDE_CANVAS_DIALOG, SHOW_CANVAS_DIALOG, - enqueueSnackbar as enqueueSnackbarAction, - closeSnackbar as closeSnackbarAction + closeSnackbar as closeSnackbarAction, + enqueueSnackbar as enqueueSnackbarAction } from '@/store/actions' const ManageScrapedLinksDialog = ({ show, dialogProps, onCancel, onSave }) => { @@ -112,6 +112,10 @@ const ManageScrapedLinksDialog = ({ show, dialogProps, onCancel, onSave }) => { setSelectedLinks(links) } + const handleAddLink = () => { + setSelectedLinks([...selectedLinks, '']) + } + const handleRemoveAllLinks = () => { setSelectedLinks([]) } @@ -160,6 +164,16 @@ const ManageScrapedLinksDialog = 
({ show, dialogProps, onCancel, onSave }) => { Scraped Links + + handleAddLink()} + > + + + {selectedLinks.length > 0 ? ( + ) + } + }) + onCancel() + } + } catch (error) { + if (setError) setError(error) + enqueueSnackbar({ + message: `Failed to share Item: ${ + typeof error.response.data === 'object' ? error.response.data.message : error.response.data + }`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + onCancel() + } + } + + const component = show ? ( + + +
    + + {dialogProps.data.title} +
    +
    + + + + Name + + + + + + + + + + + {dialogProps.confirmButtonName} + + + +
    + ) : null + + return createPortal(component, portalElement) +} + +ShareWithWorkspaceDialog.propTypes = { + show: PropTypes.bool, + dialogProps: PropTypes.object, + onCancel: PropTypes.func, + onConfirm: PropTypes.func, + setError: PropTypes.func +} + +export default ShareWithWorkspaceDialog diff --git a/packages/ui/src/ui-component/dialog/ViewMessagesDialog.jsx b/packages/ui/src/ui-component/dialog/ViewMessagesDialog.jsx index de36dd17b..40ed8c14f 100644 --- a/packages/ui/src/ui-component/dialog/ViewMessagesDialog.jsx +++ b/packages/ui/src/ui-component/dialog/ViewMessagesDialog.jsx @@ -24,9 +24,14 @@ import { CardContent, FormControlLabel, Checkbox, - DialogActions + DialogActions, + Pagination, + Typography, + Menu, + MenuItem, + IconButton } from '@mui/material' -import { useTheme } from '@mui/material/styles' +import { useTheme, styled, alpha } from '@mui/material/styles' import DatePicker from 'react-datepicker' import robotPNG from '@/assets/images/robot.png' @@ -34,10 +39,12 @@ import userPNG from '@/assets/images/account.png' import msgEmptySVG from '@/assets/images/message_empty.svg' import multiagent_supervisorPNG from '@/assets/images/multiagent_supervisor.png' import multiagent_workerPNG from '@/assets/images/multiagent_worker.png' -import { IconTool, IconDeviceSdCard, IconFileExport, IconEraser, IconX, IconDownload, IconPaperclip } from '@tabler/icons-react' +import { IconTool, IconDeviceSdCard, IconFileExport, IconEraser, IconX, IconDownload, IconPaperclip, IconBulb } from '@tabler/icons-react' +import KeyboardArrowDownIcon from '@mui/icons-material/KeyboardArrowDown' // Project import import { MemoizedReactMarkdown } from '@/ui-component/markdown/MemoizedReactMarkdown' +import { SafeHTML } from '@/ui-component/safe/SafeHTML' import SourceDocDialog from '@/ui-component/dialog/SourceDocDialog' import { MultiDropdown } from '@/ui-component/dropdown/MultiDropdown' import { StyledButton } from '@/ui-component/button/StyledButton' @@ -63,6 +70,42 @@ 
import { enqueueSnackbar as enqueueSnackbarAction, closeSnackbar as closeSnackba import '@/views/chatmessage/ChatMessage.css' import 'react-datepicker/dist/react-datepicker.css' +const StyledMenu = styled((props) => ( + +))(({ theme }) => ({ + '& .MuiPaper-root': { + borderRadius: 6, + marginTop: theme.spacing(1), + minWidth: 180, + boxShadow: + 'rgb(255, 255, 255) 0px 0px 0px 0px, rgba(0, 0, 0, 0.05) 0px 0px 0px 1px, rgba(0, 0, 0, 0.1) 0px 10px 15px -3px, rgba(0, 0, 0, 0.05) 0px 4px 6px -2px', + '& .MuiMenu-list': { + padding: '4px 0' + }, + '& .MuiMenuItem-root': { + '& .MuiSvgIcon-root': { + fontSize: 18, + color: theme.palette.text.secondary, + marginRight: theme.spacing(1.5) + }, + '&:active': { + backgroundColor: alpha(theme.palette.primary.main, theme.palette.action.selectedOpacity) + } + } + } +})) + const DatePickerCustomInput = forwardRef(function DatePickerCustomInput({ value, onClick }, ref) { return ( @@ -104,10 +147,12 @@ const ConfirmDeleteMessageDialog = ({ show, dialogProps, onCancel, onConfirm }) {dialogProps.description} - setHardDelete(event.target.checked)} />} - label='Remove messages from 3rd party Memory Node' - /> + {dialogProps.isChatflow && ( + setHardDelete(event.target.checked)} />} + label='Remove messages from 3rd party Memory Node' + /> + )} @@ -140,20 +185,21 @@ const ViewMessagesDialog = ({ show, dialogProps, onCancel }) => { const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) const [chatlogs, setChatLogs] = useState([]) - const [allChatlogs, setAllChatLogs] = useState([]) const [chatMessages, setChatMessages] = useState([]) - const [stats, setStats] = useState([]) + const [stats, setStats] = useState({}) const [selectedMessageIndex, setSelectedMessageIndex] = useState(0) const [selectedChatId, setSelectedChatId] = useState('') const [sourceDialogOpen, setSourceDialogOpen] = useState(false) const [sourceDialogProps, setSourceDialogProps] = useState({}) const [hardDeleteDialogOpen, setHardDeleteDialogOpen] = 
useState(false) const [hardDeleteDialogProps, setHardDeleteDialogProps] = useState({}) - const [chatTypeFilter, setChatTypeFilter] = useState([]) + const [chatTypeFilter, setChatTypeFilter] = useState(['INTERNAL', 'EXTERNAL']) const [feedbackTypeFilter, setFeedbackTypeFilter] = useState([]) const [startDate, setStartDate] = useState(new Date(new Date().setMonth(new Date().getMonth() - 1))) const [endDate, setEndDate] = useState(new Date()) const [leadEmail, setLeadEmail] = useState('') + const [anchorEl, setAnchorEl] = useState(null) + const open = Boolean(anchorEl) const getChatmessageApi = useApi(chatmessageApi.getAllChatmessageFromChatflow) const getChatmessageFromPKApi = useApi(chatmessageApi.getChatmessageFromPK) @@ -161,74 +207,70 @@ const ViewMessagesDialog = ({ show, dialogProps, onCancel }) => { const getStoragePathFromServer = useApi(chatmessageApi.getStoragePath) let storagePath = '' + /* Table Pagination */ + const [currentPage, setCurrentPage] = useState(1) + const [pageLimit, setPageLimit] = useState(10) + const [total, setTotal] = useState(0) + const onChange = (event, page) => { + setCurrentPage(page) + refresh(page, pageLimit, startDate, endDate, chatTypeFilter, feedbackTypeFilter) + } + + const refresh = (page, limit, startDate, endDate, chatTypes, feedbackTypes) => { + getChatmessageApi.request(dialogProps.chatflow.id, { + chatType: chatTypes.length ? chatTypes : undefined, + feedbackType: feedbackTypes.length ? feedbackTypes : undefined, + startDate: startDate, + endDate: endDate, + order: 'DESC', + page: page, + limit: limit + }) + getStatsApi.request(dialogProps.chatflow.id, { + chatType: chatTypes.length ? chatTypes : undefined, + feedbackType: feedbackTypes.length ? 
feedbackTypes : undefined, + startDate: startDate, + endDate: endDate + }) + setCurrentPage(page) + } + const onStartDateSelected = (date) => { const updatedDate = new Date(date) updatedDate.setHours(0, 0, 0, 0) setStartDate(updatedDate) - getChatmessageApi.request(dialogProps.chatflow.id, { - startDate: updatedDate, - endDate: endDate, - chatType: chatTypeFilter.length ? chatTypeFilter : undefined, - feedbackType: feedbackTypeFilter.length ? feedbackTypeFilter : undefined - }) - getStatsApi.request(dialogProps.chatflow.id, { - startDate: updatedDate, - endDate: endDate, - chatType: chatTypeFilter.length ? chatTypeFilter : undefined, - feedbackType: feedbackTypeFilter.length ? feedbackTypeFilter : undefined - }) + refresh(1, pageLimit, updatedDate, endDate, chatTypeFilter, feedbackTypeFilter) } const onEndDateSelected = (date) => { const updatedDate = new Date(date) updatedDate.setHours(23, 59, 59, 999) setEndDate(updatedDate) - getChatmessageApi.request(dialogProps.chatflow.id, { - endDate: updatedDate, - startDate: startDate, - chatType: chatTypeFilter.length ? chatTypeFilter : undefined, - feedbackType: feedbackTypeFilter.length ? feedbackTypeFilter : undefined - }) - getStatsApi.request(dialogProps.chatflow.id, { - endDate: updatedDate, - startDate: startDate, - chatType: chatTypeFilter.length ? chatTypeFilter : undefined, - feedbackType: feedbackTypeFilter.length ? feedbackTypeFilter : undefined - }) + refresh(1, pageLimit, startDate, updatedDate, chatTypeFilter, feedbackTypeFilter) } const onChatTypeSelected = (chatTypes) => { - setChatTypeFilter(chatTypes) - getChatmessageApi.request(dialogProps.chatflow.id, { - chatType: chatTypes.length ? chatTypes : undefined, - startDate: startDate, - endDate: endDate, - feedbackType: feedbackTypeFilter.length ? feedbackTypeFilter : undefined - }) - getStatsApi.request(dialogProps.chatflow.id, { - chatType: chatTypes.length ? 
chatTypes : undefined, - startDate: startDate, - endDate: endDate, - feedbackType: feedbackTypeFilter.length ? feedbackTypeFilter : undefined - }) + // Parse the JSON string from MultiDropdown back to an array + let parsedChatTypes = [] + if (chatTypes && typeof chatTypes === 'string' && chatTypes.startsWith('[') && chatTypes.endsWith(']')) { + parsedChatTypes = JSON.parse(chatTypes) + } else if (Array.isArray(chatTypes)) { + parsedChatTypes = chatTypes + } + setChatTypeFilter(parsedChatTypes) + refresh(1, pageLimit, startDate, endDate, parsedChatTypes, feedbackTypeFilter) } const onFeedbackTypeSelected = (feedbackTypes) => { - setFeedbackTypeFilter(feedbackTypes) - - getChatmessageApi.request(dialogProps.chatflow.id, { - chatType: chatTypeFilter.length ? chatTypeFilter : undefined, - feedbackType: feedbackTypes.length ? feedbackTypes : undefined, - startDate: startDate, - endDate: endDate, - order: 'ASC' - }) - getStatsApi.request(dialogProps.chatflow.id, { - chatType: chatTypeFilter.length ? chatTypeFilter : undefined, - feedbackType: feedbackTypes.length ? feedbackTypes : undefined, - startDate: startDate, - endDate: endDate - }) + // Parse the JSON string from MultiDropdown back to an array + let parsedFeedbackTypes = [] + if (feedbackTypes && typeof feedbackTypes === 'string' && feedbackTypes.startsWith('[') && feedbackTypes.endsWith(']')) { + parsedFeedbackTypes = JSON.parse(feedbackTypes) + } else if (Array.isArray(feedbackTypes)) { + parsedFeedbackTypes = feedbackTypes + } + setFeedbackTypeFilter(parsedFeedbackTypes) + refresh(1, pageLimit, startDate, endDate, chatTypeFilter, parsedFeedbackTypes) } const onDeleteMessages = () => { @@ -236,7 +278,8 @@ const ViewMessagesDialog = ({ show, dialogProps, onCancel }) => { title: 'Delete Messages', description: 'Are you sure you want to delete messages? 
This action cannot be undone.', confirmButtonName: 'Delete', - cancelButtonName: 'Cancel' + cancelButtonName: 'Cancel', + isChatflow: dialogProps.isChatflow }) setHardDeleteDialogOpen(true) } @@ -280,18 +323,7 @@ const ViewMessagesDialog = ({ show, dialogProps, onCancel }) => { ) } }) - getChatmessageApi.request(chatflowid, { - chatType: chatTypeFilter.length ? chatTypeFilter : undefined, - startDate: startDate, - endDate: endDate, - feedbackType: feedbackTypeFilter.length ? feedbackTypeFilter : undefined - }) - getStatsApi.request(chatflowid, { - chatType: chatTypeFilter.length ? chatTypeFilter : undefined, - startDate: startDate, - endDate: endDate, - feedbackType: feedbackTypeFilter.length ? feedbackTypeFilter : undefined - }) + refresh(1, pageLimit, startDate, endDate, chatTypeFilter, feedbackTypeFilter) } catch (error) { console.error(error) enqueueSnackbar({ @@ -310,6 +342,15 @@ const ViewMessagesDialog = ({ show, dialogProps, onCancel }) => { } } + const getChatType = (chatType) => { + if (chatType === 'INTERNAL') { + return 'UI' + } else if (chatType === 'EVALUATION') { + return 'Evaluation' + } + return 'API/Embed' + } + const exportMessages = async () => { if (!storagePath && getStoragePathFromServer.data) { storagePath = getStoragePathFromServer.data.storagePath @@ -319,6 +360,16 @@ const ViewMessagesDialog = ({ show, dialogProps, onCancel }) => { if ('windows' === getOS()) { fileSeparator = '\\' } + + const resp = await chatmessageApi.getAllChatmessageFromChatflow(dialogProps.chatflow.id, { + chatType: chatTypeFilter.length ? chatTypeFilter : undefined, + feedbackType: feedbackTypeFilter.length ? feedbackTypeFilter : undefined, + startDate: startDate, + endDate: endDate, + order: 'DESC' + }) + + const allChatlogs = resp.data ?? 
[] for (let i = 0; i < allChatlogs.length; i += 1) { const chatmsg = allChatlogs[i] const chatPK = getChatPK(chatmsg) @@ -356,7 +407,7 @@ const ViewMessagesDialog = ({ show, dialogProps, onCancel }) => { if (!Object.prototype.hasOwnProperty.call(obj, chatPK)) { obj[chatPK] = { id: chatmsg.chatId, - source: chatmsg.chatType === 'INTERNAL' ? 'UI' : 'API/Embed', + source: getChatType(chatmsg.chatType), sessionId: chatmsg.sessionId ?? null, memoryType: chatmsg.memoryType ?? null, email: chatmsg.leadEmail ?? null, @@ -546,20 +597,42 @@ const ViewMessagesDialog = ({ show, dialogProps, onCancel }) => { item: allChatMessages[i] } } else if (Object.prototype.hasOwnProperty.call(seen, PK) && seen[PK].counter === 1) { + // Properly identify user and API messages regardless of order + const firstMessage = seen[PK].item + const secondMessage = item + + let userContent = '' + let apiContent = '' + + // Check both messages and assign based on role, not order + if (firstMessage.role === 'userMessage') { + userContent = `User: ${firstMessage.content}` + } else if (firstMessage.role === 'apiMessage') { + apiContent = `Bot: ${firstMessage.content}` + } + + if (secondMessage.role === 'userMessage') { + userContent = `User: ${secondMessage.content}` + } else if (secondMessage.role === 'apiMessage') { + apiContent = `Bot: ${secondMessage.content}` + } + seen[PK] = { counter: 2, item: { ...seen[PK].item, - apiContent: - seen[PK].item.role === 'apiMessage' ? `Bot: ${seen[PK].item.content}` : `User: ${seen[PK].item.content}`, - userContent: item.role === 'apiMessage' ? 
`Bot: ${item.content}` : `User: ${item.content}` + apiContent, + userContent } } filteredChatLogs.push(seen[PK].item) } } - setChatLogs(filteredChatLogs) - if (filteredChatLogs.length) return getChatPK(filteredChatLogs[0]) + + // Sort by date to maintain chronological order + const sortedChatLogs = filteredChatLogs.sort((a, b) => new Date(b.createdDate) - new Date(a.createdDate)) + setChatLogs(sortedChatLogs) + if (sortedChatLogs.length) return getChatPK(sortedChatLogs[0]) return undefined } @@ -604,6 +677,14 @@ const ViewMessagesDialog = ({ show, dialogProps, onCancel }) => { setSourceDialogOpen(true) } + const handleClick = (event) => { + setAnchorEl(event.currentTarget) + } + + const handleClose = () => { + setAnchorEl(null) + } + const renderFileUploads = (item, index) => { if (item?.mime?.startsWith('image/')) { return ( @@ -676,7 +757,6 @@ const ViewMessagesDialog = ({ show, dialogProps, onCancel }) => { if (getChatmessageApi.data) { getStoragePathFromServer.request() - setAllChatLogs(getChatmessageApi.data) const chatPK = processChatLogs(getChatmessageApi.data) setSelectedMessageIndex(0) if (chatPK) { @@ -697,15 +777,13 @@ const ViewMessagesDialog = ({ show, dialogProps, onCancel }) => { useEffect(() => { if (getStatsApi.data) { setStats(getStatsApi.data) + setTotal(getStatsApi.data?.totalSessions ?? 
0) } }, [getStatsApi.data]) useEffect(() => { if (dialogProps.chatflow) { - getChatmessageApi.request(dialogProps.chatflow.id, { - startDate: startDate, - endDate: endDate - }) + refresh(currentPage, pageLimit, startDate, endDate, chatTypeFilter, feedbackTypeFilter) getStatsApi.request(dialogProps.chatflow.id, { startDate: startDate, endDate: endDate @@ -714,9 +792,8 @@ const ViewMessagesDialog = ({ show, dialogProps, onCancel }) => { return () => { setChatLogs([]) - setAllChatLogs([]) setChatMessages([]) - setChatTypeFilter([]) + setChatTypeFilter(['INTERNAL', 'EXTERNAL']) setFeedbackTypeFilter([]) setSelectedMessageIndex(0) setSelectedChatId('') @@ -724,6 +801,9 @@ const ViewMessagesDialog = ({ show, dialogProps, onCancel }) => { setEndDate(new Date()) setStats([]) setLeadEmail('') + setTotal(0) + setCurrentPage(1) + setPageLimit(10) } // eslint-disable-next-line react-hooks/exhaustive-deps @@ -739,16 +819,7 @@ const ViewMessagesDialog = ({ show, dialogProps, onCancel }) => { if (dialogProps.chatflow) { // when the filter is cleared fetch all messages if (feedbackTypeFilter.length === 0) { - getChatmessageApi.request(dialogProps.chatflow.id, { - startDate: startDate, - endDate: endDate, - chatType: chatTypeFilter.length ? chatTypeFilter : undefined - }) - getStatsApi.request(dialogProps.chatflow.id, { - startDate: startDate, - endDate: endDate, - chatType: chatTypeFilter.length ? chatTypeFilter : undefined - }) + refresh(currentPage, pageLimit, startDate, endDate, chatTypeFilter, feedbackTypeFilter) } } // eslint-disable-next-line react-hooks/exhaustive-deps @@ -797,7 +868,7 @@ const ViewMessagesDialog = ({ show, dialogProps, onCancel }) => { } else if (item.type === 'html') { return (
    -
    +
    ) } else { @@ -810,19 +881,10 @@ const ViewMessagesDialog = ({ show, dialogProps, onCancel }) => { onClose={onCancel} open={show} fullWidth - maxWidth={'lg'} + maxWidth={'xl'} aria-labelledby='alert-dialog-title' aria-describedby='alert-dialog-description' > - -
    - {dialogProps.title} -
    - -
    - <>
    { { label: 'API/Embed', name: 'EXTERNAL' + }, + { + label: 'Evaluations', + name: 'EVALUATION' } ]} onSelect={(newValue) => onChatTypeSelected(newValue)} @@ -899,7 +965,7 @@ const ViewMessagesDialog = ({ show, dialogProps, onCancel }) => { Feedback { />
    - {stats.totalMessages > 0 && ( - - )} + + + { + handleClose() + exportMessages() + }} + disableRipple + > + + Export to JSON + + {(stats.totalMessages ?? 0) > 0 && ( + { + handleClose() + onDeleteMessages() + }} + disableRipple + > + + Delete All + + )} +
    - - + + +
    -
    - {chatlogs && chatlogs.length == 0 && ( +
    + {chatlogs && chatlogs.length === 0 && ( { )} {chatlogs && chatlogs.length > 0 && ( -
    +
    { maxHeight: 'calc(100vh - 260px)' }} > +
    + + Sessions {pageLimit * (currentPage - 1) + 1} - {Math.min(pageLimit * currentPage, total)} of{' '} + {total} + + +
    {chatlogs.map((chatmsg, index) => ( {
    )} {chatlogs && chatlogs.length > 0 && ( -
    +
    {chatMessages && chatMessages.length > 1 && ( -
    +
    {chatMessages[1].sessionId && (
    @@ -1016,7 +1154,7 @@ const ViewMessagesDialog = ({ show, dialogProps, onCancel }) => { )} {chatMessages[1].chatType && (
    - Source: {chatMessages[1].chatType === 'INTERNAL' ? 'UI' : 'API/Embed'} + Source: {getChatType(chatMessages[1].chatType)}
    )} {chatMessages[1].memoryType && ( @@ -1033,31 +1171,26 @@ const ViewMessagesDialog = ({ show, dialogProps, onCancel }) => {
    - clearChat(chatMessages[1])} - startIcon={} - > - Clear - + + clearChat(chatMessages[1])}> + + + {chatMessages[1].sessionId && ( -
    - Why my session is not deleted? -
    + + +
    )}
    @@ -1068,12 +1201,15 @@ const ViewMessagesDialog = ({ show, dialogProps, onCancel }) => { display: 'flex', flexDirection: 'column', marginLeft: '20px', - border: '1px solid #e0e0e0', - borderRadius: `${customization.borderRadius}px` + marginBottom: '5px', + border: customization.isDarkMode ? 'none' : '1px solid #e0e0e0', + boxShadow: customization.isDarkMode ? '0 0 5px 0 rgba(255, 255, 255, 0.5)' : 'none', + borderRadius: `10px`, + overflow: 'hidden' }} className='cloud-message' > -
    +
    {chatMessages && chatMessages.map((message, index) => { if (message.type === 'apiMessage' || message.type === 'userMessage') { @@ -1112,7 +1248,9 @@ const ViewMessagesDialog = ({ show, dialogProps, onCancel }) => { style={{ display: 'flex', flexDirection: 'column', - width: '100%' + width: '100%', + minWidth: 0, + overflow: 'hidden' }} > {message.fileUploads && message.fileUploads.length > 0 && ( @@ -1399,7 +1537,10 @@ const ViewMessagesDialog = ({ show, dialogProps, onCancel }) => { })}
    )} -
    +
    {message.message} @@ -1473,7 +1614,9 @@ const ViewMessagesDialog = ({ show, dialogProps, onCancel }) => { return ( { const selectedParam = nodeData.inputParams.find((param) => param.name === name) const loadMethod = selectedParam?.loadMethod - const username = localStorage.getItem('username') - const password = localStorage.getItem('password') + + let credentialId = nodeData.credential + if (!credentialId && (nodeData.inputs?.credential || nodeData.inputs?.['FLOWISE_CREDENTIAL_ID'])) { + credentialId = nodeData.inputs.credential || nodeData.inputs?.['FLOWISE_CREDENTIAL_ID'] + } + + let config = { + headers: { + 'x-request-from': 'internal', + 'Content-type': 'application/json' + }, + withCredentials: true + } let lists = await axios .post( `${baseURL}/api/v1/node-load-method/${nodeData.name}`, - { ...nodeData, loadMethod, previousNodes, currentNode }, - { - auth: username && password ? { username, password } : undefined, - headers: { 'Content-type': 'application/json', 'x-request-from': 'internal' } - } + { ...nodeData, loadMethod, previousNodes, currentNode, credential: credentialId }, + config ) .then(async function (response) { return response.data @@ -63,7 +71,8 @@ export const AsyncDropdown = ({ disabled = false, freeSolo = false, disableClearable = false, - multiple = false + multiple = false, + fullWidth = false }) => { const customization = useSelector((state) => state.customization) const theme = useTheme() @@ -176,7 +185,7 @@ export const AsyncDropdown = ({ multiple={multiple} filterSelectedOptions={multiple} size='small' - sx={{ mt: 1, width: '100%' }} + sx={{ mt: 1, width: fullWidth ? '100%' : multiple ? '90%' : '100%' }} open={open} onOpen={() => { setOpen(true) @@ -211,7 +220,8 @@ export const AsyncDropdown = ({ const matchingOptions = multiple ? findMatchingOptions(options, internalValue) : [findMatchingOptions(options, internalValue)].filter(Boolean) - return ( + + const textField = ( ) + + return !multiple ? 
( + textField + ) : ( + + {textField} + + ) }} renderOption={(props, option) => ( @@ -295,5 +319,6 @@ AsyncDropdown.propTypes = { credentialNames: PropTypes.array, disableClearable: PropTypes.bool, isCreateNewOption: PropTypes.bool, - multiple: PropTypes.bool + multiple: PropTypes.bool, + fullWidth: PropTypes.bool } diff --git a/packages/ui/src/ui-component/extended/AudioWaveform.jsx b/packages/ui/src/ui-component/extended/AudioWaveform.jsx new file mode 100644 index 000000000..a6d75845a --- /dev/null +++ b/packages/ui/src/ui-component/extended/AudioWaveform.jsx @@ -0,0 +1,311 @@ +import { useRef, useEffect, useState, useCallback } from 'react' +import PropTypes from 'prop-types' +import { Box, IconButton, CircularProgress } from '@mui/material' +import { IconPlayerPlay, IconPlayerPause } from '@tabler/icons-react' +import { useTheme } from '@mui/material/styles' + +const AudioWaveform = ({ + audioSrc, + onPlay, + onPause, + onEnded, + isPlaying = false, + duration: _duration = 0, + isGenerating = false, + disabled = false, + externalAudioRef = null, + resetProgress = false +}) => { + const canvasRef = useRef(null) + const audioRef = useRef(null) + const animationRef = useRef(null) + const theme = useTheme() + + const [progress, setProgress] = useState(0) + const [_audioBuffer, setAudioBuffer] = useState(null) + const [waveformData, setWaveformData] = useState([]) + + // Generate waveform visualization data + const generateWaveform = useCallback((buffer) => { + if (!buffer) return [] + + const rawData = buffer.getChannelData(0) + const samples = 200 // More bars for smoother appearance like reference + const blockSize = Math.floor(rawData.length / samples) + const filteredData = [] + + for (let i = 0; i < samples; i++) { + let blockStart = blockSize * i + let sum = 0 + for (let j = 0; j < blockSize; j++) { + sum += Math.abs(rawData[blockStart + j]) + } + filteredData.push(sum / blockSize) + } + + // Normalize the data + const maxValue = Math.max(...filteredData) + 
return filteredData.map((value) => (value / maxValue) * 100) + }, []) + + // Generate realistic placeholder waveform like in reference + const generatePlaceholderWaveform = useCallback(() => { + const samples = 200 + const waveform = [] + + for (let i = 0; i < samples; i++) { + // Create a more realistic waveform pattern + const position = i / samples + const baseHeight = 20 + Math.sin(position * Math.PI * 4) * 15 + const variation = Math.random() * 40 + 10 + const envelope = Math.sin(position * Math.PI) * 0.8 + 0.2 + + waveform.push((baseHeight + variation) * envelope) + } + + return waveform + }, []) + + // Draw waveform on canvas + const drawWaveform = useCallback(() => { + const canvas = canvasRef.current + if (!canvas || waveformData.length === 0) return + + const ctx = canvas.getContext('2d') + + // Handle high DPI displays for crisp rendering + const dpr = window.devicePixelRatio || 1 + const rect = canvas.getBoundingClientRect() + + canvas.width = rect.width * dpr + canvas.height = rect.height * dpr + ctx.scale(dpr, dpr) + + canvas.style.width = rect.width + 'px' + canvas.style.height = rect.height + 'px' + + ctx.clearRect(0, 0, rect.width, rect.height) + + // More bars for smoother appearance like the reference + const totalBars = waveformData.length + const barWidth = 2 // Fixed thin bar width like in reference + const barSpacing = 1 // Small gap between bars + const totalWidth = rect.width + const startX = (totalWidth - totalBars * (barWidth + barSpacing)) / 2 + const centerY = rect.height / 2 + + waveformData.forEach((value, index) => { + const barHeight = Math.max(2, (value / 100) * (rect.height * 0.8)) + const x = startX + index * (barWidth + barSpacing) + + // Determine color based on playback progress + const progressIndex = Math.floor((progress / 100) * waveformData.length) + const isPlayed = index <= progressIndex + + ctx.fillStyle = isPlayed ? theme.palette.primary.main : theme.palette.mode === 'dark' ? 
'#444' : '#ccc' + + // Draw thin vertical bars like in reference + ctx.fillRect(x, centerY - barHeight / 2, barWidth, barHeight) + }) + }, [waveformData, progress, theme]) + + // Load and decode audio for waveform generation + useEffect(() => { + if (audioSrc && audioSrc.startsWith('blob:')) { + const loadAudioBuffer = async () => { + try { + const response = await fetch(audioSrc) + const arrayBuffer = await response.arrayBuffer() + const audioContext = new (window.AudioContext || window.webkitAudioContext)() + const buffer = await audioContext.decodeAudioData(arrayBuffer) + setAudioBuffer(buffer) + const waveform = generateWaveform(buffer) + setWaveformData(waveform) + } catch (error) { + console.error('Error loading audio buffer:', error) + // Generate placeholder waveform + const placeholder = generatePlaceholderWaveform() + setWaveformData(placeholder) + } + } + loadAudioBuffer() + } else { + // Always show placeholder waveform when no audio source + const placeholder = generatePlaceholderWaveform() + setWaveformData(placeholder) + } + }, [audioSrc, generateWaveform, generatePlaceholderWaveform]) + + // Reset progress when resetProgress prop is true + useEffect(() => { + if (resetProgress) { + setProgress(0) + } + }, [resetProgress]) + + // Draw waveform when data changes or progress updates + useEffect(() => { + drawWaveform() + }, [drawWaveform, progress]) + + // Update progress during playback + useEffect(() => { + const activeAudioRef = externalAudioRef || audioRef.current + if (isPlaying && activeAudioRef && audioSrc) { + const updateProgress = () => { + const audio = externalAudioRef || audioRef.current + if (audio && audio.duration && !isNaN(audio.duration)) { + const currentProgress = (audio.currentTime / audio.duration) * 100 + setProgress(currentProgress) + } + if (isPlaying && audio && !audio.paused) { + animationRef.current = requestAnimationFrame(updateProgress) + } + } + + // Start the update loop + animationRef.current = 
requestAnimationFrame(updateProgress) + } else { + if (animationRef.current) { + cancelAnimationFrame(animationRef.current) + } + } + + return () => { + if (animationRef.current) { + cancelAnimationFrame(animationRef.current) + } + } + }, [isPlaying, audioSrc, externalAudioRef]) + + const handlePlayPause = () => { + if (isPlaying) { + onPause?.() + } else { + onPlay?.() + } + } + + // Handle canvas click for seeking + const handleCanvasClick = (event) => { + const activeAudio = externalAudioRef || audioRef.current + if (!activeAudio || !activeAudio.duration || disabled || isGenerating) return + + const canvas = canvasRef.current + const rect = canvas.getBoundingClientRect() + const clickX = event.clientX - rect.left + + // Use the actual canvas display width for more accurate clicking + const clickProgress = Math.max(0, Math.min(100, (clickX / rect.width) * 100)) + const seekTime = (clickProgress / 100) * activeAudio.duration + + activeAudio.currentTime = seekTime + setProgress(clickProgress) + } + + return ( + + {/* Hidden audio element for duration and seeking - only if no external ref */} + {audioSrc && !externalAudioRef && ( + + )} + + {/* Play button and Waveform side by side */} + + {/* Play/Pause Button */} + + {isGenerating ? ( + + ) : isPlaying ? 
( + + ) : ( + + )} + + + {/* Waveform Canvas */} + + + + + + ) +} + +AudioWaveform.propTypes = { + audioSrc: PropTypes.string, + onPlay: PropTypes.func, + onPause: PropTypes.func, + onEnded: PropTypes.func, + isPlaying: PropTypes.bool, + duration: PropTypes.number, + isGenerating: PropTypes.bool, + disabled: PropTypes.bool, + externalAudioRef: PropTypes.object, + resetProgress: PropTypes.bool +} + +export default AudioWaveform diff --git a/packages/ui/src/ui-component/extended/FileUpload.jsx b/packages/ui/src/ui-component/extended/FileUpload.jsx index 02063775b..d688cb754 100644 --- a/packages/ui/src/ui-component/extended/FileUpload.jsx +++ b/packages/ui/src/ui-component/extended/FileUpload.jsx @@ -1,4 +1,4 @@ -import { useDispatch } from 'react-redux' +import { useDispatch, useSelector } from 'react-redux' import { useState, useEffect } from 'react' import PropTypes from 'prop-types' import { enqueueSnackbar as enqueueSnackbarAction, closeSnackbar as closeSnackbarAction, SET_CHATFLOW } from '@/store/actions' @@ -18,26 +18,30 @@ import useNotifier from '@/utils/useNotifier' // API import chatflowsApi from '@/api/chatflows' -const message = `Uploaded files will be parsed as strings and sent to the LLM. If file upload is enabled on the Vector Store as well, this will override and take precedence. +const message = `The full contents of uploaded files will be converted to text and sent to the Agent.
    Refer docs for more details.` const availableFileTypes = [ - { name: 'CSS', ext: 'text/css' }, - { name: 'CSV', ext: 'text/csv' }, - { name: 'HTML', ext: 'text/html' }, - { name: 'JSON', ext: 'application/json' }, - { name: 'Markdown', ext: 'text/markdown' }, - { name: 'PDF', ext: 'application/pdf' }, - { name: 'SQL', ext: 'application/sql' }, - { name: 'Text File', ext: 'text/plain' }, - { name: 'XML', ext: 'application/xml' }, - { name: 'DOC', ext: 'application/msword' }, - { name: 'DOCX', ext: 'application/vnd.openxmlformats-officedocument.wordprocessingml.document' } + { name: 'CSS', ext: 'text/css', extension: '.css' }, + { name: 'CSV', ext: 'text/csv', extension: '.csv' }, + { name: 'HTML', ext: 'text/html', extension: '.html' }, + { name: 'JSON', ext: 'application/json', extension: '.json' }, + { name: 'Markdown', ext: 'text/markdown', extension: '.md' }, + { name: 'YAML', ext: 'application/x-yaml', extension: '.yaml' }, + { name: 'PDF', ext: 'application/pdf', extension: '.pdf' }, + { name: 'SQL', ext: 'application/sql', extension: '.sql' }, + { name: 'Text File', ext: 'text/plain', extension: '.txt' }, + { name: 'XML', ext: 'application/xml', extension: '.xml' }, + { name: 'DOC', ext: 'application/msword', extension: '.doc' }, + { name: 'DOCX', ext: 'application/vnd.openxmlformats-officedocument.wordprocessingml.document', extension: '.docx' }, + { name: 'XLSX', ext: 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet', extension: '.xlsx' }, + { name: 'PPTX', ext: 'application/vnd.openxmlformats-officedocument.presentationml.presentation', extension: '.pptx' } ] const FileUpload = ({ dialogProps }) => { const dispatch = useDispatch() + const customization = useSelector((state) => state.customization) useNotifier() @@ -221,30 +225,49 @@ const FileUpload = ({ dialogProps }) => { onChange={handleAllowedFileTypesChange} />
    ))}
    - - PDF Usage - - - } label='One document per page' /> - } label='One document per file' /> - - - + {allowedFileTypes.includes('application/pdf') && fullFileUpload && ( + + + PDF Configuration + - - - + + PDF Usage + + + } label='One document per page' /> + } label='One document per file' /> + + + + + + + + + )} Save diff --git a/packages/ui/src/ui-component/extended/FollowUpPrompts.jsx b/packages/ui/src/ui-component/extended/FollowUpPrompts.jsx index 680e4f11c..bc0d95bc1 100644 --- a/packages/ui/src/ui-component/extended/FollowUpPrompts.jsx +++ b/packages/ui/src/ui-component/extended/FollowUpPrompts.jsx @@ -15,6 +15,7 @@ import azureOpenAiIcon from '@/assets/images/azure_openai.svg' import mistralAiIcon from '@/assets/images/mistralai.svg' import openAiIcon from '@/assets/images/openai.svg' import groqIcon from '@/assets/images/groq.png' +import geminiIcon from '@/assets/images/gemini.png' import ollamaIcon from '@/assets/images/ollama.svg' import { TooltipWithParser } from '@/ui-component/tooltip/TooltipWithParser' import CredentialInputHandler from '@/views/canvas/CredentialInputHandler' @@ -117,7 +118,7 @@ const followUpPromptsOptions = { [FollowUpPromptProviders.GOOGLE_GENAI]: { label: 'Google Gemini', name: FollowUpPromptProviders.GOOGLE_GENAI, - icon: azureOpenAiIcon, + icon: geminiIcon, inputs: [ { label: 'Connect Credential', @@ -128,12 +129,8 @@ const followUpPromptsOptions = { { label: 'Model Name', name: 'modelName', - type: 'options', - default: 'gemini-1.5-pro-latest', - options: [ - { label: 'gemini-1.5-flash-latest', name: 'gemini-1.5-flash-latest' }, - { label: 'gemini-1.5-pro-latest', name: 'gemini-1.5-pro-latest' } - ] + type: 'asyncOptions', + loadMethod: 'listModels' }, { label: 'Prompt', @@ -204,11 +201,8 @@ const followUpPromptsOptions = { { label: 'Model Name', name: 'modelName', - type: 'options', - options: [ - { label: 'mistral-large-latest', name: 'mistral-large-latest' }, - { label: 'mistral-large-2402', name: 'mistral-large-2402' } 
- ] + type: 'asyncOptions', + loadMethod: 'listModels' }, { label: 'Prompt', diff --git a/packages/ui/src/ui-component/extended/OverrideConfig.jsx b/packages/ui/src/ui-component/extended/OverrideConfig.jsx index 1a3f4e38d..df1e8732e 100644 --- a/packages/ui/src/ui-component/extended/OverrideConfig.jsx +++ b/packages/ui/src/ui-component/extended/OverrideConfig.jsx @@ -48,21 +48,30 @@ const OverrideConfigTable = ({ columns, onToggle, rows, sx }) => { return handleChange(enabled, row)} value={row.enabled} /> } else if (key === 'type' && row.schema) { // If there's schema information, add a tooltip - const schemaContent = - '[
    ' + - row.schema - .map( - (item) => - `  ${JSON.stringify( - { - [item.name]: item.type - }, - null, - 2 - )}` - ) - .join(',
    ') + - '
    ]' + let schemaContent + if (Array.isArray(row.schema)) { + // Handle array format: [{ name: "field", type: "string" }, ...] + schemaContent = + '[
    ' + + row.schema + .map( + (item) => + `  ${JSON.stringify( + { + [item.name]: item.type + }, + null, + 2 + )}` + ) + .join(',
    ') + + '
    ]' + } else if (typeof row.schema === 'object' && row.schema !== null) { + // Handle object format: { "field": "string", "field2": "number", ... } + schemaContent = JSON.stringify(row.schema, null, 2).replace(/\n/g, '
    ').replace(/ /g, ' ') + } else { + schemaContent = 'No schema available' + } return ( diff --git a/packages/ui/src/ui-component/extended/SpeechToText.jsx b/packages/ui/src/ui-component/extended/SpeechToText.jsx index d119e5f89..d397cb2ca 100644 --- a/packages/ui/src/ui-component/extended/SpeechToText.jsx +++ b/packages/ui/src/ui-component/extended/SpeechToText.jsx @@ -379,7 +379,11 @@ const SpeechToText = ({ dialogProps }) => { width: 50, height: 50, borderRadius: '50%', - backgroundColor: 'white' + backgroundColor: 'white', + flexShrink: 0, + display: 'flex', + alignItems: 'center', + justifyContent: 'center' }} > { sx={{ ml: 1 }} primary={speechToTextProviders[selectedProvider].label} secondary={ - + {speechToTextProviders[selectedProvider].url} } diff --git a/packages/ui/src/ui-component/extended/TextToSpeech.jsx b/packages/ui/src/ui-component/extended/TextToSpeech.jsx new file mode 100644 index 000000000..226d45a1e --- /dev/null +++ b/packages/ui/src/ui-component/extended/TextToSpeech.jsx @@ -0,0 +1,660 @@ +import { useDispatch } from 'react-redux' +import { useState, useEffect } from 'react' +import PropTypes from 'prop-types' +import { enqueueSnackbar as enqueueSnackbarAction, closeSnackbar as closeSnackbarAction, SET_CHATFLOW } from '@/store/actions' + +// material-ui +import { + Typography, + Box, + Button, + FormControl, + ListItem, + ListItemAvatar, + ListItemText, + MenuItem, + Select, + CircularProgress, + Autocomplete, + TextField +} from '@mui/material' +import { IconX, IconVolume } from '@tabler/icons-react' +import { useTheme } from '@mui/material/styles' + +// Project import +import CredentialInputHandler from '@/views/canvas/CredentialInputHandler' +import { TooltipWithParser } from '@/ui-component/tooltip/TooltipWithParser' +import { SwitchInput } from '@/ui-component/switch/Switch' +import { Input } from '@/ui-component/input/Input' +import { StyledButton } from '@/ui-component/button/StyledButton' +import { Dropdown } from 
'@/ui-component/dropdown/Dropdown' +import AudioWaveform from '@/ui-component/extended/AudioWaveform' +import openAISVG from '@/assets/images/openai.svg' +import elevenLabsSVG from '@/assets/images/elevenlabs.svg' + +// store +import useNotifier from '@/utils/useNotifier' + +// API +import chatflowsApi from '@/api/chatflows' +import ttsApi from '@/api/tts' + +const TextToSpeechType = { + OPENAI_TTS: 'openai', + ELEVEN_LABS_TTS: 'elevenlabs' +} + +// Weird quirk - the key must match the name property value. +const textToSpeechProviders = { + [TextToSpeechType.OPENAI_TTS]: { + label: 'OpenAI TTS', + name: TextToSpeechType.OPENAI_TTS, + icon: openAISVG, + url: 'https://platform.openai.com/docs/guides/text-to-speech', + inputs: [ + { + label: 'Connect Credential', + name: 'credential', + type: 'credential', + credentialNames: ['openAIApi'] + }, + { + label: 'Voice', + name: 'voice', + type: 'voice_select', + description: 'The voice to use when generating the audio', + default: 'alloy', + optional: true + } + ] + }, + [TextToSpeechType.ELEVEN_LABS_TTS]: { + label: 'Eleven Labs TTS', + name: TextToSpeechType.ELEVEN_LABS_TTS, + icon: elevenLabsSVG, + url: 'https://elevenlabs.io/', + inputs: [ + { + label: 'Connect Credential', + name: 'credential', + type: 'credential', + credentialNames: ['elevenLabsApi'] + }, + { + label: 'Voice', + name: 'voice', + type: 'voice_select', + description: 'The voice to use for text-to-speech', + default: '21m00Tcm4TlvDq8ikWAM', + optional: true + } + ] + } +} + +const TextToSpeech = ({ dialogProps }) => { + const dispatch = useDispatch() + + useNotifier() + const theme = useTheme() + + const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) + const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) + + const [textToSpeech, setTextToSpeech] = useState(null) + const [selectedProvider, setSelectedProvider] = useState('none') + const [voices, setVoices] = useState([]) + const [loadingVoices, 
setLoadingVoices] = useState(false) + const [testAudioSrc, setTestAudioSrc] = useState(null) + const [isTestPlaying, setIsTestPlaying] = useState(false) + const [testAudioRef, setTestAudioRef] = useState(null) + const [isGeneratingTest, setIsGeneratingTest] = useState(false) + const [resetWaveform, setResetWaveform] = useState(false) + + const resetTestAudio = () => { + if (testAudioSrc) { + URL.revokeObjectURL(testAudioSrc) + setTestAudioSrc(null) + } + setIsTestPlaying(false) + setResetWaveform(true) + setTimeout(() => setResetWaveform(false), 100) + } + + const onSave = async () => { + const textToSpeechConfig = setValue(true, selectedProvider, 'status') + try { + const saveResp = await chatflowsApi.updateChatflow(dialogProps.chatflow.id, { + textToSpeech: JSON.stringify(textToSpeechConfig) + }) + if (saveResp.data) { + enqueueSnackbar({ + message: 'Text To Speech Configuration Saved', + options: { + key: Date.now() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + dispatch({ type: SET_CHATFLOW, chatflow: saveResp.data }) + } + } catch (error) { + enqueueSnackbar({ + message: `Failed to save Text To Speech Configuration: ${ + typeof error.response.data === 'object' ? 
error.response.data.message : error.response.data + }`, + options: { + key: Date.now() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + } + } + + const setValue = (value, providerName, inputParamName) => { + let newVal = {} + if (!textToSpeech || !Object.hasOwn(textToSpeech, providerName)) { + newVal = { ...(textToSpeech || {}), [providerName]: {} } + } else { + newVal = { ...textToSpeech } + } + + newVal[providerName][inputParamName] = value + if (inputParamName === 'status' && value === true) { + // ensure that the others are turned off + Object.keys(textToSpeechProviders).forEach((key) => { + const provider = textToSpeechProviders[key] + if (provider.name !== providerName) { + newVal[provider.name] = { ...(textToSpeech?.[provider.name] || {}), status: false } + } + }) + if (providerName !== 'none' && newVal['none']) { + newVal['none'].status = false + } + } + + // Reset test audio when voice or credential is changed + if ((inputParamName === 'voice' || inputParamName === 'credentialId') && providerName === selectedProvider) { + resetTestAudio() + } + + setTextToSpeech(newVal) + return newVal + } + + const handleProviderChange = (provider, configOverride = null) => { + setSelectedProvider(provider) + setVoices([]) + resetTestAudio() + + if (provider !== 'none') { + const config = configOverride || textToSpeech + const credentialId = config?.[provider]?.credentialId + if (credentialId) { + loadVoicesForProvider(provider, credentialId) + } + } + } + + const loadVoicesForProvider = async (provider, credentialId) => { + if (provider === 'none' || !credentialId) return + + setLoadingVoices(true) + try { + const params = new URLSearchParams({ provider }) + params.append('credentialId', credentialId) + + const response = await ttsApi.listVoices(params) + + if (response.data) { + const voicesData = await response.data + setVoices(voicesData) + } else { + setVoices([]) + } + } catch (error) { + console.error('Error loading 
voices:', error) + setVoices([]) + } finally { + setLoadingVoices(false) + } + } + + const testTTS = async () => { + if (selectedProvider === 'none' || !textToSpeech?.[selectedProvider]?.credentialId) { + enqueueSnackbar({ + message: 'Please select a provider and configure credentials first', + options: { variant: 'warning' } + }) + return + } + + setIsGeneratingTest(true) + + try { + const providerConfig = textToSpeech?.[selectedProvider] || {} + const body = { + text: 'Today is a wonderful day to build something with Flowise!', + provider: selectedProvider, + credentialId: providerConfig.credentialId, + voice: providerConfig.voice, + model: providerConfig.model + } + + const response = await fetch('/api/v1/text-to-speech/generate', { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + 'x-request-from': 'internal' + }, + credentials: 'include', + body: JSON.stringify(body) + }) + + if (!response.ok) { + throw new Error(`HTTP error! status: ${response.status}`) + } + + const audioChunks = [] + const reader = response.body.getReader() + let buffer = '' + + let done = false + while (!done) { + const result = await reader.read() + done = result.done + if (done) break + + const chunk = new TextDecoder().decode(result.value, { stream: true }) + buffer += chunk + const lines = buffer.split('\n\n') + buffer = lines.pop() || '' + + for (const eventBlock of lines) { + if (eventBlock.trim()) { + const event = parseSSEEvent(eventBlock) + if (event && event.event === 'tts_data' && event.data?.audioChunk) { + const audioBuffer = Uint8Array.from(atob(event.data.audioChunk), (c) => c.charCodeAt(0)) + audioChunks.push(audioBuffer) + } + } + } + } + + if (audioChunks.length > 0) { + // Combine all chunks into a single blob + const totalLength = audioChunks.reduce((sum, chunk) => sum + chunk.length, 0) + const combinedBuffer = new Uint8Array(totalLength) + let offset = 0 + + for (const chunk of audioChunks) { + combinedBuffer.set(chunk, offset) + offset += 
chunk.length + } + + const audioBlob = new Blob([combinedBuffer], { type: 'audio/mpeg' }) + const audioUrl = URL.createObjectURL(audioBlob) + + // Clean up previous audio + if (testAudioSrc) { + URL.revokeObjectURL(testAudioSrc) + } + + setTestAudioSrc(audioUrl) + } else { + throw new Error('No audio data received') + } + } catch (error) { + console.error('Error testing TTS:', error) + enqueueSnackbar({ + message: `TTS test failed: ${error.message}`, + options: { variant: 'error' } + }) + } finally { + setIsGeneratingTest(false) + } + } + + const parseSSEEvent = (eventBlock) => { + const lines = eventBlock.trim().split('\n') + const event = { event: null, data: null } + + for (const line of lines) { + if (line.startsWith('event:')) { + event.event = line.substring(6).trim() + } else if (line.startsWith('data:')) { + const dataStr = line.substring(5).trim() + try { + const parsed = JSON.parse(dataStr) + if (parsed.data) { + event.data = parsed.data + } + } catch (e) { + console.error('Error parsing SSE data:', e) + } + } + } + return event.event ? 
event : null + } + + // Audio control functions for waveform component + const handleTestPlay = async () => { + // If audio already exists, just play it + if (testAudioRef && testAudioSrc) { + testAudioRef.play() + setIsTestPlaying(true) + return + } + + // If no audio exists, generate it first + if (!testAudioSrc) { + await testTTS() + // testTTS will set the audio source, and we'll play it in the next useEffect + } + } + + const handleTestPause = () => { + if (testAudioRef) { + testAudioRef.pause() + setIsTestPlaying(false) + } + } + + const handleTestEnded = () => { + setIsTestPlaying(false) + } + + // Auto-play when audio is generated (if user clicked play) + useEffect(() => { + if (testAudioSrc && testAudioRef && !isTestPlaying) { + // Small delay to ensure audio element is ready + setTimeout(() => { + testAudioRef.play() + setIsTestPlaying(true) + }, 100) + } + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [testAudioSrc, testAudioRef]) + + useEffect(() => { + if (dialogProps.chatflow && dialogProps.chatflow.textToSpeech) { + try { + const textToSpeechConfig = JSON.parse(dialogProps.chatflow.textToSpeech) + let selectedProvider = 'none' + Object.keys(textToSpeechProviders).forEach((key) => { + const providerConfig = textToSpeechConfig[key] + if (providerConfig && providerConfig.status) { + selectedProvider = key + } + }) + setSelectedProvider(selectedProvider) + setTextToSpeech(textToSpeechConfig) + handleProviderChange(selectedProvider, textToSpeechConfig) + } catch { + setTextToSpeech(null) + setSelectedProvider('none') + } + } + + return () => { + setTextToSpeech(null) + setSelectedProvider('none') + setVoices([]) + resetTestAudio() + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [dialogProps]) + + return ( + <> + + Providers + + + + + {selectedProvider !== 'none' && ( + <> + + +
    + TTS Provider +
    +
    + + {textToSpeechProviders[selectedProvider].url} + + } + /> +
    + {textToSpeechProviders[selectedProvider].inputs.map((inputParam) => ( + +
    + + {inputParam.label} + {!inputParam.optional &&  *} + {inputParam.description && ( + + )} + +
    + {inputParam.type === 'credential' && ( + { + setValue(newValue, selectedProvider, 'credentialId') + // Load voices when credential is updated + if (newValue && selectedProvider !== 'none') { + setTimeout(() => loadVoicesForProvider(selectedProvider, newValue), 100) + } + }} + /> + )} + {inputParam.type === 'boolean' && ( + setValue(newValue, selectedProvider, inputParam.name)} + value={ + textToSpeech?.[selectedProvider] + ? textToSpeech[selectedProvider][inputParam.name] + : inputParam.default ?? false + } + /> + )} + {(inputParam.type === 'string' || inputParam.type === 'password' || inputParam.type === 'number') && ( + setValue(newValue, selectedProvider, inputParam.name)} + value={ + textToSpeech?.[selectedProvider] + ? textToSpeech[selectedProvider][inputParam.name] + : inputParam.default ?? '' + } + /> + )} + {inputParam.type === 'options' && ( + setValue(newValue, selectedProvider, inputParam.name)} + value={ + textToSpeech?.[selectedProvider] + ? textToSpeech[selectedProvider][inputParam.name] + : inputParam.default ?? 'choose an option' + } + /> + )} + {inputParam.type === 'voice_select' && ( + option.name || ''} + value={ + voices.find( + (voice) => + voice.id === (textToSpeech?.[selectedProvider]?.[inputParam.name] || inputParam.default) + ) || null + } + onChange={(event, newValue) => { + setValue(newValue ? newValue.id : '', selectedProvider, inputParam.name) + }} + renderInput={(params) => ( + + {loadingVoices ? : null} + {params.InputProps.endAdornment} + + ) + }} + /> + )} + disabled={loadingVoices || !textToSpeech?.[selectedProvider]?.credentialId} + /> + )} +
    + ))} + + {/* Auto-play Toggle */} + +
    + + Automatically play audio + + +
    + setValue(newValue, selectedProvider, 'autoPlay')} + value={textToSpeech?.[selectedProvider] ? textToSpeech[selectedProvider].autoPlay ?? false : false} + /> +
    + + {/* Test Voice Section */} + + + + Test Voice + + + + Test text: "Today is a wonderful day to build something with Flowise!" + + + + + {/* Hidden audio element for waveform control */} + {testAudioSrc && ( + + )} + + + )} + + Save + + + ) +} + +TextToSpeech.propTypes = { + dialogProps: PropTypes.object +} + +export default TextToSpeech diff --git a/packages/ui/src/ui-component/form/settings.jsx b/packages/ui/src/ui-component/form/settings.jsx new file mode 100644 index 000000000..9d785b58f --- /dev/null +++ b/packages/ui/src/ui-component/form/settings.jsx @@ -0,0 +1,66 @@ +import { useTheme } from '@mui/material/styles' +import { Box, Typography } from '@mui/material' +import { gridSpacing } from '@/store/constant' +import PropTypes from 'prop-types' + +const SettingsSection = ({ action, children, title }) => { + const theme = useTheme() + + return ( + + + + {title} + + + + {children} + + {action && ( + + {action} + + )} + + ) +} + +SettingsSection.propTypes = { + action: PropTypes.node, + children: PropTypes.node, + title: PropTypes.string +} + +export default SettingsSection diff --git a/packages/ui/src/ui-component/input/Input.jsx b/packages/ui/src/ui-component/input/Input.jsx index 7571726f9..246862510 100644 --- a/packages/ui/src/ui-component/input/Input.jsx +++ b/packages/ui/src/ui-component/input/Input.jsx @@ -32,6 +32,8 @@ export const Input = ({ inputParam, value, nodes, edges, nodeId, onChange, disab return 'password' case 'number': return 'number' + case 'email': + return 'email' default: return 'text' } diff --git a/packages/ui/src/ui-component/input/RichInput.jsx b/packages/ui/src/ui-component/input/RichInput.jsx index 50c7e7cf3..b020984f9 100644 --- a/packages/ui/src/ui-component/input/RichInput.jsx +++ b/packages/ui/src/ui-component/input/RichInput.jsx @@ -1,19 +1,26 @@ import { useState, useEffect } from 'react' import PropTypes from 'prop-types' +import { useSelector } from 'react-redux' import { useEditor, EditorContent } from 
'@tiptap/react' import Placeholder from '@tiptap/extension-placeholder' import { mergeAttributes } from '@tiptap/core' import StarterKit from '@tiptap/starter-kit' import { styled } from '@mui/material/styles' import { Box } from '@mui/material' -import Mention from '@tiptap/extension-mention' +import CodeBlockLowlight from '@tiptap/extension-code-block-lowlight' +import { common, createLowlight } from 'lowlight' import { suggestionOptions } from './suggestionOption' import { getAvailableNodesForVariable } from '@/utils/genericHelper' +import { CustomMention } from '@/utils/customMention' + +const lowlight = createLowlight(common) // define your extension array const extensions = (availableNodesForVariable, availableState, acceptNodeOutputAsVariable, nodes, nodeData, isNodeInsideInteration) => [ - StarterKit, - Mention.configure({ + StarterKit.configure({ + codeBlock: false + }), + CustomMention.configure({ HTMLAttributes: { class: 'variable' }, @@ -33,11 +40,16 @@ const extensions = (availableNodesForVariable, availableState, acceptNodeOutputA isNodeInsideInteration ), deleteTriggerWithBackspace: true + }), + CodeBlockLowlight.configure({ + lowlight, + enableTabIndentation: true, + tabSize: 2 }) ] // Add styled component for editor wrapper -const StyledEditorContent = styled(EditorContent)(({ theme, rows }) => ({ +const StyledEditorContent = styled(EditorContent)(({ theme, rows, disabled, isDarkMode }) => ({ '& .ProseMirror': { padding: '0px 14px', height: rows ? `${rows * 1.4375}rem` : '2.4rem', @@ -45,37 +57,46 @@ const StyledEditorContent = styled(EditorContent)(({ theme, rows }) => ({ overflowX: rows ? 'auto' : 'hidden', lineHeight: rows ? '1.4375em' : '0.875em', fontWeight: 500, - color: theme.palette.grey[900], + color: disabled ? theme.palette.action.disabled : theme.palette.grey[900], border: `1px solid ${theme.palette.grey[900] + 25}`, borderRadius: '10px', backgroundColor: theme.palette.textBackground.main, boxSizing: 'border-box', whiteSpace: rows ? 
'pre-wrap' : 'nowrap', '&:hover': { - borderColor: theme.palette.text.primary, - cursor: 'text' + borderColor: disabled ? `${theme.palette.grey[900] + 25}` : theme.palette.text.primary, + cursor: disabled ? 'default' : 'text' }, '&:focus': { - borderColor: theme.palette.primary.main, + borderColor: disabled ? `${theme.palette.grey[900] + 25}` : theme.palette.primary.main, outline: 'none' }, - '&[disabled]': { - backgroundColor: theme.palette.action.disabledBackground, - color: theme.palette.action.disabled - }, // Placeholder for first paragraph when editor is empty '& p.is-editor-empty:first-of-type::before': { content: 'attr(data-placeholder)', float: 'left', - color: theme.palette.text.primary, - opacity: 0.4, + color: disabled ? theme.palette.action.disabled : theme.palette.text.primary, + opacity: disabled ? 0.6 : 0.4, pointerEvents: 'none', height: 0 - } + }, + // Set CSS custom properties for theme-aware styling based on the screenshot + '--code-bg': isDarkMode ? '#2d2d2d' : '#f5f5f5', + '--code-color': isDarkMode ? '#d4d4d4' : '#333333', + '--hljs-comment': isDarkMode ? '#6a9955' : '#6a9955', + '--hljs-variable': isDarkMode ? '#9cdcfe' : '#d73a49', // Light blue for variables (var, i) + '--hljs-number': isDarkMode ? '#b5cea8' : '#e36209', // Light green for numbers (1, 20, 15, etc.) + '--hljs-string': isDarkMode ? '#ce9178' : '#22863a', // Orange/peach for strings ("FizzBuzz", "Fizz", "Buzz") + '--hljs-title': isDarkMode ? '#dcdcaa' : '#6f42c1', // Yellow for function names (log) + '--hljs-keyword': isDarkMode ? '#569cd6' : '#005cc5', // Blue for keywords (for, if, else) + '--hljs-operator': isDarkMode ? '#d4d4d4' : '#333333', // White/gray for operators (=, %, ==, etc.) + '--hljs-punctuation': isDarkMode ? '#d4d4d4' : '#333333' // White/gray for punctuation ({, }, ;, etc.) 
} })) export const RichInput = ({ inputParam, value, nodes, edges, nodeId, onChange, disabled = false }) => { + const customization = useSelector((state) => state.customization) + const isDarkMode = customization.isDarkMode const [availableNodesForVariable, setAvailableNodesForVariable] = useState([]) const [availableState, setAvailableState] = useState([]) const [nodeData, setNodeData] = useState({}) @@ -121,7 +142,7 @@ export const RichInput = ({ inputParam, value, nodes, edges, nodeId, onChange, d return ( - + ) } diff --git a/packages/ui/src/ui-component/input/suggestionOption.js b/packages/ui/src/ui-component/input/suggestionOption.js index 229870e2b..0247c8a05 100644 --- a/packages/ui/src/ui-component/input/suggestionOption.js +++ b/packages/ui/src/ui-component/input/suggestionOption.js @@ -59,10 +59,22 @@ export const suggestionOptions = ( description: 'Past conversation history between user and AI', category: 'Chat Context' }, + { + id: 'current_date_time', + mentionLabel: 'current_date_time', + description: 'Current date and time', + category: 'Chat Context' + }, { id: 'runtime_messages_length', mentionLabel: 'runtime_messages_length', - description: 'Total messsages between LLM and Agent', + description: 'Total messages between LLM and Agent', + category: 'Chat Context' + }, + { + id: 'loop_count', + mentionLabel: 'loop_count', + description: 'Current loop count', category: 'Chat Context' }, { @@ -100,7 +112,7 @@ export const suggestionOptions = ( category: 'Node Outputs' }) - const structuredOutputs = nodeData?.inputs?.llmStructuredOutput ?? [] + const structuredOutputs = nodeData?.inputs?.llmStructuredOutput ?? nodeData?.inputs?.agentStructuredOutput ?? 
[] if (structuredOutputs && structuredOutputs.length > 0) { structuredOutputs.forEach((item) => { defaultItems.unshift({ diff --git a/packages/ui/src/ui-component/json/JsonViewer.jsx b/packages/ui/src/ui-component/json/JsonViewer.jsx index 823478e61..6d6b72bc7 100644 --- a/packages/ui/src/ui-component/json/JsonViewer.jsx +++ b/packages/ui/src/ui-component/json/JsonViewer.jsx @@ -3,31 +3,85 @@ import { Box } from '@mui/material' import { useTheme } from '@mui/material/styles' import PropTypes from 'prop-types' -// Syntax highlighting function for JSON -function syntaxHighlight(json) { - if (!json) return '' // No JSON from response - - json = json.replace(/&/g, '&').replace(//g, '>') - - return json.replace( - // eslint-disable-next-line - /("(\\u[a-zA-Z0-9]{4}|\\[^u]|[^\\"])*"(\s*:)?|\b(true|false|null)\b|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?)/g, - function (match) { - let cls = 'number' - if (/^"/.test(match)) { - if (/:$/.test(match)) { - cls = 'key' - } else { - cls = 'string' - } - } else if (/true|false/.test(match)) { - cls = 'boolean' - } else if (/null/.test(match)) { - cls = 'null' - } - return '' + match + '' +const JsonToken = ({ type, children, isDarkMode }) => { + const getTokenStyle = (tokenType) => { + switch (tokenType) { + case 'string': + return { color: isDarkMode ? '#9cdcfe' : 'green' } + case 'number': + return { color: isDarkMode ? '#b5cea8' : 'darkorange' } + case 'boolean': + return { color: isDarkMode ? '#569cd6' : 'blue' } + case 'null': + return { color: isDarkMode ? '#d4d4d4' : 'magenta' } + case 'key': + return { color: isDarkMode ? 
'#ff5733' : '#ff5733' } + default: + return {} } - ) + } + + return {children} +} + +function parseJsonToElements(json, isDarkMode) { + if (!json) return [] + + const tokens = [] + let index = 0 + + // Escape HTML characters for safety + const escapedJson = json.replace(/&/g, '&').replace(//g, '>') + + // eslint-disable-next-line + const tokenRegex = /("(\\u[a-zA-Z0-9]{4}|\\[^u]|[^\\"])*"(\s*:)?|\b(true|false|null)\b|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?)/g + + let match + let lastIndex = 0 + + while ((match = tokenRegex.exec(escapedJson)) !== null) { + // Add any text before the match as plain text + if (match.index > lastIndex) { + const plainText = escapedJson.substring(lastIndex, match.index) + if (plainText) { + tokens.push({plainText}) + } + } + + // Determine token type + let tokenType = 'number' + const matchText = match[0] + + if (/^"/.test(matchText)) { + if (/:$/.test(matchText)) { + tokenType = 'key' + } else { + tokenType = 'string' + } + } else if (/true|false/.test(matchText)) { + tokenType = 'boolean' + } else if (/null/.test(matchText)) { + tokenType = 'null' + } + + tokens.push( + + {matchText} + + ) + + lastIndex = match.index + match[0].length + } + + // Add any remaining text + if (lastIndex < escapedJson.length) { + const remainingText = escapedJson.substring(lastIndex) + if (remainingText) { + tokens.push({remainingText}) + } + } + + return tokens } export const JSONViewer = ({ data, maxHeight = '400px' }) => { @@ -35,6 +89,9 @@ export const JSONViewer = ({ data, maxHeight = '400px' }) => { const customization = useSelector((state) => state.customization) const isDarkMode = customization.isDarkMode + const jsonString = JSON.stringify(data, null, 2) + const jsonElements = parseJsonToElements(jsonString, isDarkMode) + return ( { maxHeight: maxHeight }} > -
     {
                         whiteSpace: 'pre-wrap',
                         wordBreak: 'break-word'
                     }}
    -                dangerouslySetInnerHTML={{
    -                    __html: syntaxHighlight(JSON.stringify(data, null, 2), isDarkMode)
    -                }}
    -            />
    +            >
    +                {jsonElements}
    +            
    ) } @@ -85,3 +124,9 @@ JSONViewer.propTypes = { data: PropTypes.object, maxHeight: PropTypes.string } + +JsonToken.propTypes = { + type: PropTypes.string.isRequired, + children: PropTypes.node.isRequired, + isDarkMode: PropTypes.bool.isRequired +} diff --git a/packages/ui/src/ui-component/markdown/Markdown.css b/packages/ui/src/ui-component/markdown/Markdown.css index c152a97ed..f91adaafe 100644 --- a/packages/ui/src/ui-component/markdown/Markdown.css +++ b/packages/ui/src/ui-component/markdown/Markdown.css @@ -19,3 +19,55 @@ border-width: 1px !important; padding: 10px !important; } + +.react-markdown h1, +.react-markdown h2, +.react-markdown h3, +.react-markdown h4, +.react-markdown h5, +.react-markdown h6 { + line-height: 1.4; + margin: 0.8em 0 0.4em 0; + font-weight: 600; +} + +.react-markdown h1 { + font-size: 1.8em; +} + +.react-markdown h2 { + font-size: 1.5em; +} + +.react-markdown h3 { + font-size: 1.3em; +} + +.react-markdown h4 { + font-size: 1.1em; +} + +.react-markdown h5 { + font-size: 1em; + font-weight: 700; +} + +.react-markdown h6 { + font-size: 0.9em; + font-weight: 700; +} + +.react-markdown p { + line-height: 1.6; + margin: 0.5em 0; +} + +.react-markdown img { + max-width: 100%; + max-height: 400px; + height: auto; + object-fit: contain; + border-radius: 8px; + box-shadow: 0 2px 8px rgba(0, 0, 0, 0.1); + margin: 10px 0; +} diff --git a/packages/ui/src/ui-component/markdown/MemoizedReactMarkdown.jsx b/packages/ui/src/ui-component/markdown/MemoizedReactMarkdown.jsx index c0e3b2054..73b1cb02a 100644 --- a/packages/ui/src/ui-component/markdown/MemoizedReactMarkdown.jsx +++ b/packages/ui/src/ui-component/markdown/MemoizedReactMarkdown.jsx @@ -6,7 +6,6 @@ import { CodeBlock } from '../markdown/CodeBlock' import remarkGfm from 'remark-gfm' import remarkMath from 'remark-math' import rehypeMathjax from 'rehype-mathjax' -import rehypeRaw from 'rehype-raw' /** * Checks if text likely contains LaTeX math notation @@ -91,7 +90,7 @@ export const 
MemoizedReactMarkdown = memo( const rehypePlugins = useMemo(() => { if (props.rehypePlugins) return props.rehypePlugins - return shouldEnableMath ? [rehypeMathjax, rehypeRaw] : [rehypeRaw] + return shouldEnableMath ? [rehypeMathjax] : [] }, [props.rehypePlugins, shouldEnableMath]) return ( diff --git a/packages/ui/src/ui-component/pagination/TablePagination.jsx b/packages/ui/src/ui-component/pagination/TablePagination.jsx new file mode 100644 index 000000000..ef6059cdc --- /dev/null +++ b/packages/ui/src/ui-component/pagination/TablePagination.jsx @@ -0,0 +1,85 @@ +import { Box, FormControl, MenuItem, Pagination, Select, Typography } from '@mui/material' +import { useEffect, useState } from 'react' +import { useTheme } from '@mui/material/styles' +import { useSelector } from 'react-redux' +import PropTypes from 'prop-types' + +export const DEFAULT_ITEMS_PER_PAGE = 12 + +const TablePagination = ({ currentPage, limit, total, onChange }) => { + const theme = useTheme() + const customization = useSelector((state) => state.customization) + const borderColor = theme.palette.grey[900] + 25 + + const [itemsPerPage, setItemsPerPage] = useState(DEFAULT_ITEMS_PER_PAGE) + const [activePage, setActivePage] = useState(1) + const [totalItems, setTotalItems] = useState(0) + + useEffect(() => { + setTotalItems(total) + }, [total]) + + useEffect(() => { + setItemsPerPage(limit) + }, [limit]) + + useEffect(() => { + setActivePage(currentPage) + }, [currentPage]) + + const handlePageChange = (event, value) => { + setActivePage(value) + onChange(value, itemsPerPage) + } + + const handleLimitChange = (event) => { + const itemsPerPage = parseInt(event.target.value, 10) + setItemsPerPage(itemsPerPage) + setActivePage(1) + onChange(1, itemsPerPage) + } + + return ( + + + Items per page: + + + + + {totalItems > 0 && ( + + Items {activePage * itemsPerPage - itemsPerPage + 1} to{' '} + {activePage * itemsPerPage > totalItems ? 
totalItems : activePage * itemsPerPage} of {totalItems} + + )} + + + ) +} + +TablePagination.propTypes = { + onChange: PropTypes.func.isRequired, + currentPage: PropTypes.number, + limit: PropTypes.number, + total: PropTypes.number +} + +export default TablePagination diff --git a/packages/ui/src/ui-component/rbac/available.jsx b/packages/ui/src/ui-component/rbac/available.jsx new file mode 100644 index 000000000..884bfeea0 --- /dev/null +++ b/packages/ui/src/ui-component/rbac/available.jsx @@ -0,0 +1,14 @@ +import PropTypes from 'prop-types' +import { useAuth } from '@/hooks/useAuth' + +export const Available = ({ permission, children }) => { + const { hasPermission } = useAuth() + if (hasPermission(permission)) { + return children + } +} + +Available.propTypes = { + permission: PropTypes.string, + children: PropTypes.element +} diff --git a/packages/ui/src/ui-component/safe/SafeHTML.jsx b/packages/ui/src/ui-component/safe/SafeHTML.jsx new file mode 100644 index 000000000..3ef11b3fe --- /dev/null +++ b/packages/ui/src/ui-component/safe/SafeHTML.jsx @@ -0,0 +1,58 @@ +import PropTypes from 'prop-types' +import DOMPurify from 'dompurify' + +/** + * SafeHTML component that sanitizes HTML content before rendering + */ +export const SafeHTML = ({ html, allowedTags, allowedAttributes, ...props }) => { + // Configure DOMPurify options + const config = { + ALLOWED_TAGS: allowedTags || [ + 'p', + 'br', + 'strong', + 'em', + 'u', + 'i', + 'b', + 'h1', + 'h2', + 'h3', + 'h4', + 'h5', + 'h6', + 'ul', + 'ol', + 'li', + 'blockquote', + 'pre', + 'code', + 'a', + 'img', + 'table', + 'thead', + 'tbody', + 'tr', + 'th', + 'td', + 'div', + 'span' + ], + ALLOWED_ATTR: allowedAttributes || ['href', 'title', 'alt', 'src', 'class', 'id', 'style'], + ALLOW_DATA_ATTR: false, + FORBID_SCRIPT: true, + FORBID_TAGS: ['script', 'object', 'embed', 'form', 'input'], + FORBID_ATTR: ['onerror', 'onload', 'onclick', 'onmouseover'] + } + + // Sanitize the HTML content + const sanitizedHTML = 
DOMPurify.sanitize(html || '', config) + + return
    +} + +SafeHTML.propTypes = { + html: PropTypes.string.isRequired, + allowedTags: PropTypes.arrayOf(PropTypes.string), + allowedAttributes: PropTypes.arrayOf(PropTypes.string) +} diff --git a/packages/ui/src/ui-component/subscription/PricingDialog.jsx b/packages/ui/src/ui-component/subscription/PricingDialog.jsx new file mode 100644 index 000000000..7f95f2d7c --- /dev/null +++ b/packages/ui/src/ui-component/subscription/PricingDialog.jsx @@ -0,0 +1,670 @@ +import { useState, useEffect, useMemo } from 'react' +import { useSelector } from 'react-redux' +import PropTypes from 'prop-types' +import { + Dialog, + DialogContent, + DialogTitle, + Grid, + Typography, + Button, + IconButton, + Box, + CircularProgress, + DialogActions +} from '@mui/material' +import { IconX, IconCheck, IconCreditCard, IconExternalLink, IconAlertCircle } from '@tabler/icons-react' +import { useTheme, alpha } from '@mui/material/styles' +import accountApi from '@/api/account.api' +import pricingApi from '@/api/pricing' +import workspaceApi from '@/api/workspace' +import userApi from '@/api/user' +import useApi from '@/hooks/useApi' +import { useSnackbar } from 'notistack' +import { store } from '@/store' +import { upgradePlanSuccess } from '@/store/reducers/authSlice' + +const PricingDialog = ({ open, onClose }) => { + const customization = useSelector((state) => state.customization) + const currentUser = useSelector((state) => state.auth.user) + const theme = useTheme() + const { enqueueSnackbar } = useSnackbar() + + const [openPlanDialog, setOpenPlanDialog] = useState(false) + const [selectedPlan, setSelectedPlan] = useState(null) + const [prorationInfo, setProrationInfo] = useState(null) + const [isUpdatingPlan, setIsUpdatingPlan] = useState(false) + const [purchasedSeats, setPurchasedSeats] = useState(0) + const [occupiedSeats, setOccupiedSeats] = useState(0) + const [workspaceCount, setWorkspaceCount] = useState(0) + const [isOpeningBillingPortal, setIsOpeningBillingPortal] = 
useState(false) + + const getPricingPlansApi = useApi(pricingApi.getPricingPlans) + const getCustomerDefaultSourceApi = useApi(userApi.getCustomerDefaultSource) + const getPlanProrationApi = useApi(userApi.getPlanProration) + const getAdditionalSeatsQuantityApi = useApi(userApi.getAdditionalSeatsQuantity) + const getAllWorkspacesApi = useApi(workspaceApi.getAllWorkspacesByOrganizationId) + + useEffect(() => { + getPricingPlansApi.request() + getAdditionalSeatsQuantityApi.request(currentUser?.activeOrganizationSubscriptionId) + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []) + + const handlePlanClick = async (plan) => { + if (plan.title === 'Enterprise') { + window.location.href = 'mailto:hello@flowiseai.com' + return + } + + setSelectedPlan(plan) + setOpenPlanDialog(true) + getCustomerDefaultSourceApi.request(currentUser?.activeOrganizationCustomerId) + } + + const handleBillingPortalClick = async () => { + setIsOpeningBillingPortal(true) + try { + const response = await accountApi.getBillingData() + if (response.data?.url) { + setOpenPlanDialog(false) + window.open(response.data.url, '_blank') + } + } catch (error) { + console.error('Error accessing billing portal:', error) + } + setIsOpeningBillingPortal(false) + } + + const handleUpdatePlan = async () => { + if (!selectedPlan || !prorationInfo) return + + setIsUpdatingPlan(true) + try { + const response = await userApi.updateSubscriptionPlan( + currentUser.activeOrganizationSubscriptionId, + selectedPlan.prodId, + prorationInfo.prorationDate + ) + if (response.data.status === 'success') { + // Subscription updated successfully + store.dispatch(upgradePlanSuccess(response.data.user)) + enqueueSnackbar('Subscription updated successfully!', { variant: 'success' }) + onClose(true) + } else { + const errorMessage = response.data.message || 'Subscription failed to update' + enqueueSnackbar(errorMessage, { variant: 'error' }) + onClose() + } + } catch (error) { + console.error('Error updating plan:', 
error) + const errorMessage = err.response?.data?.message || 'Failed to verify subscription' + enqueueSnackbar(errorMessage, { variant: 'error' }) + onClose() + } finally { + setIsUpdatingPlan(false) + setOpenPlanDialog(false) + } + } + + useEffect(() => { + if (getAllWorkspacesApi.data) { + setWorkspaceCount(getAllWorkspacesApi.data?.length || 0) + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getAllWorkspacesApi.data]) + + useEffect(() => { + if ( + getCustomerDefaultSourceApi.data && + getCustomerDefaultSourceApi.data?.invoice_settings?.default_payment_method && + currentUser?.activeOrganizationSubscriptionId + ) { + getPlanProrationApi.request(currentUser.activeOrganizationSubscriptionId, selectedPlan.prodId) + } + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getCustomerDefaultSourceApi.data]) + + useEffect(() => { + if (getPlanProrationApi.data) { + setProrationInfo(getPlanProrationApi.data) + } + }, [getPlanProrationApi.data]) + + useEffect(() => { + if (getAdditionalSeatsQuantityApi.data) { + const purchased = getAdditionalSeatsQuantityApi.data?.quantity || 0 + const occupied = getAdditionalSeatsQuantityApi.data?.totalOrgUsers || 1 + + setPurchasedSeats(purchased) + setOccupiedSeats(occupied) + } + }, [getAdditionalSeatsQuantityApi.data]) + + const pricingPlans = useMemo(() => { + if (!getPricingPlansApi.data) return [] + + return getPricingPlansApi.data.map((plan) => { + // Enterprise plan has special handling + if (plan.title === 'Enterprise') { + return { + ...plan, + buttonText: 'Contact Us', + buttonVariant: 'outlined', + buttonAction: () => handlePlanClick(plan) + } + } + + const isCurrentPlanValue = currentUser?.activeOrganizationProductId === plan.prodId + const isStarterPlan = plan.title === 'Starter' + + if (isCurrentPlanValue && (plan.title === 'Pro' || plan.title === 'Enterprise')) { + getAllWorkspacesApi.request(currentUser?.activeOrganizationId) + } + + return { + ...plan, + currentPlan: 
isCurrentPlanValue, + isStarterPlan, + buttonText: isCurrentPlanValue ? 'Current Plan' : 'Get Started', + buttonVariant: plan.mostPopular ? 'contained' : 'outlined', + disabled: isCurrentPlanValue || !currentUser.isOrganizationAdmin, + buttonAction: () => handlePlanClick(plan) + } + }) + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getPricingPlansApi.data, currentUser.isOrganizationAdmin]) + + const handleClose = () => { + if (!isUpdatingPlan) { + setProrationInfo(null) + onClose() + } + } + + const handlePlanDialogClose = () => { + if (!isUpdatingPlan) { + setProrationInfo(null) + setOpenPlanDialog(false) + } + } + + return ( + <> + theme.palette.background.default, + boxShadow: customization.isDarkMode ? '0 0 50px 0 rgba(255, 255, 255, 0.5)' : '0 0 10px 0 rgba(0, 0, 0, 0.1)' + } + }} + > + + Pricing Plans + + + + + + + {pricingPlans.map((plan) => ( + + + plan.mostPopular + ? theme.palette.primary.main + : plan.currentPlan + ? theme.palette.success.main + : theme.palette.background.paper, + borderRadius: 2, + display: 'flex', + flexDirection: 'column', + minHeight: '450px', + position: 'relative', + boxShadow: customization.isDarkMode + ? '0 0 10px 0 rgba(255, 255, 255, 0.5)' + : '0 0 10px 0 rgba(0, 0, 0, 0.1)', + backgroundColor: (theme) => (plan.currentPlan ? alpha(theme.palette.success.main, 0.05) : 'inherit') + }} + > + {plan.currentPlan && ( + + + Current Plan + + + )} + {plan.mostPopular && !plan.currentPlan && ( + + + Most Popular + + + )} + + {plan.title} + + + {plan.subtitle} + + + + {plan.price} + + {plan.period && ( + + {plan.period} + + )} + + + {plan.features.map((feature, index) => ( + + + + {feature.text} + {feature.subtext && ( + + {feature.subtext} + + )} + + + ))} + + {plan.isStarterPlan && !plan.currentPlan && ( + + + First Month Free + + + )} + + + + ))} + + + + + + Confirm Plan Change + + + {purchasedSeats > 0 || occupiedSeats > 1 ? ( + + + You must remove additional seats and users before changing your plan. 
+ + ) : workspaceCount > 1 ? ( + <> + + + You must remove all workspaces except the default workspace before changing your plan. + + + ) : ( + <> + {getCustomerDefaultSourceApi.loading ? ( + + ) : getCustomerDefaultSourceApi.data?.invoice_settings?.default_payment_method ? ( + + Payment Method + + {getCustomerDefaultSourceApi.data.invoice_settings.default_payment_method.card && ( + <> + + + + { + getCustomerDefaultSourceApi.data.invoice_settings.default_payment_method + .card.brand + } + + + โ€ขโ€ขโ€ขโ€ข{' '} + { + getCustomerDefaultSourceApi.data.invoice_settings.default_payment_method + .card.last4 + } + + + (expires{' '} + { + getCustomerDefaultSourceApi.data.invoice_settings.default_payment_method + .card.exp_month + } + / + { + getCustomerDefaultSourceApi.data.invoice_settings.default_payment_method + .card.exp_year + } + ) + + + + )} + + + ) : ( + + + + No payment method found + + + + )} + + {getPlanProrationApi.loading && ( + + + + )} + + {prorationInfo && ( + + {/* Date Range */} + + {new Date(prorationInfo.currentPeriodStart * 1000).toLocaleDateString('en-US', { + month: 'short', + day: 'numeric' + })}{' '} + -{' '} + {new Date(prorationInfo.currentPeriodEnd * 1000).toLocaleDateString('en-US', { + month: 'short', + day: 'numeric', + year: 'numeric' + })} + + + {/* First Month Free Notice */} + {selectedPlan?.title === 'Starter' && prorationInfo.eligibleForFirstMonthFree && ( + + + {`You're eligible for your first month free!`} + + + )} + + {/* Base Plan */} + + {selectedPlan.title} Plan + + {prorationInfo.currency} {Math.max(0, prorationInfo.newPlanAmount).toFixed(2)} + + + + {selectedPlan?.title === 'Starter' && prorationInfo.eligibleForFirstMonthFree && ( + + First Month Discount + + -{prorationInfo.currency} {Math.max(0, prorationInfo.newPlanAmount).toFixed(2)} + + + )} + + {/* Credit Balance */} + {prorationInfo.prorationAmount > 0 && prorationInfo.creditBalance !== 0 && ( + + Applied account balance + + {prorationInfo.currency} 
{prorationInfo.creditBalance.toFixed(2)} + + + )} + + {prorationInfo.prorationAmount < 0 && ( + + Credit balance + + {prorationInfo.currency} {prorationInfo.prorationAmount < 0 ? '+' : ''} + {Math.abs(prorationInfo.prorationAmount).toFixed(2)} + + + )} + + {/* Next Payment */} + + Due today + + {prorationInfo.currency}{' '} + {Math.max(0, prorationInfo.prorationAmount + prorationInfo.creditBalance).toFixed(2)} + + + + {prorationInfo.prorationAmount < 0 && ( + + Your available credit will automatically apply to your next invoice. + + )} + + )} + + )} + + + {getCustomerDefaultSourceApi.data?.invoice_settings?.default_payment_method && ( + + + + + )} + + + ) +} + +PricingDialog.propTypes = { + open: PropTypes.bool, + onClose: PropTypes.func +} + +export default PricingDialog diff --git a/packages/ui/src/ui-component/table/DocumentStoreTable.jsx b/packages/ui/src/ui-component/table/DocumentStoreTable.jsx new file mode 100644 index 000000000..0f5df0089 --- /dev/null +++ b/packages/ui/src/ui-component/table/DocumentStoreTable.jsx @@ -0,0 +1,255 @@ +import { useState } from 'react' +import PropTypes from 'prop-types' +import { useSelector } from 'react-redux' +import { styled } from '@mui/material/styles' +import { + Box, + Paper, + Skeleton, + Table, + TableBody, + TableCell, + TableContainer, + TableHead, + TableRow, + TableSortLabel, + useTheme, + Typography +} from '@mui/material' +import { tableCellClasses } from '@mui/material/TableCell' +import DocumentStoreStatus from '@/views/docstore/DocumentStoreStatus' + +const StyledTableCell = styled(TableCell)(({ theme }) => ({ + borderColor: theme.palette.grey[900] + 25, + + [`&.${tableCellClasses.head}`]: { + color: theme.palette.grey[900] + }, + [`&.${tableCellClasses.body}`]: { + fontSize: 14, + height: 64 + } +})) + +const StyledTableRow = styled(TableRow)(() => ({ + // hide last border + '&:last-child td, &:last-child th': { + border: 0 + } +})) + +export const DocumentStoreTable = ({ data, isLoading, onRowClick, 
images }) => { + const theme = useTheme() + const customization = useSelector((state) => state.customization) + + const localStorageKeyOrder = 'doc_store_order' + const localStorageKeyOrderBy = 'doc_store_orderBy' + + const [order, setOrder] = useState(localStorage.getItem(localStorageKeyOrder) || 'desc') + const [orderBy, setOrderBy] = useState(localStorage.getItem(localStorageKeyOrderBy) || 'name') + + const handleRequestSort = (property) => { + const isAsc = orderBy === property && order === 'asc' + const newOrder = isAsc ? 'desc' : 'asc' + setOrder(newOrder) + setOrderBy(property) + localStorage.setItem(localStorageKeyOrder, newOrder) + localStorage.setItem(localStorageKeyOrderBy, property) + } + + const sortedData = data + ? [...data].sort((a, b) => { + if (orderBy === 'name') { + return order === 'asc' ? (a.name || '').localeCompare(b.name || '') : (b.name || '').localeCompare(a.name || '') + } + return 0 + }) + : [] + + return ( + <> + + + + +   + + handleRequestSort('name')}> + Name + + + Description + Connected flows + Total characters + Total chunks + Loader Types + + + + {isLoading ? ( + <> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ) : ( + <> + {sortedData.map((row, index) => { + return ( + onRowClick(row)} + hover + key={index} + sx={{ cursor: 'pointer', '&:last-child td, &:last-child th': { border: 0 } }} + > + + + + + + {row.name} + + + + + {row?.description} + + + {row.whereUsed?.length ?? 0} + {row.totalChars} + {row.totalChunks} + + {images && images[row.id] && ( + + {images[row.id] + .slice(0, images[row.id].length > 3 ? 3 : images[row.id].length) + .map((img) => ( + + + + ))} + {images?.length > 3 && ( + + + {images.length - 3} More + + )} + + )} + + + ) + })} + + )} + +
    +
    + + ) +} + +DocumentStoreTable.propTypes = { + data: PropTypes.array, + isLoading: PropTypes.bool, + images: PropTypes.object, + onRowClick: PropTypes.func +} + +DocumentStoreTable.displayName = 'DocumentStoreTable' diff --git a/packages/ui/src/ui-component/table/FilesTable.jsx b/packages/ui/src/ui-component/table/FilesTable.jsx new file mode 100644 index 000000000..71734c6ba --- /dev/null +++ b/packages/ui/src/ui-component/table/FilesTable.jsx @@ -0,0 +1,173 @@ +import PropTypes from 'prop-types' +import { useSelector } from 'react-redux' +import { styled } from '@mui/material/styles' +import { + IconButton, + Paper, + Skeleton, + Table, + TableBody, + TableCell, + TableContainer, + TableHead, + TableRow, + Tooltip, + Typography, + useTheme +} from '@mui/material' +import { tableCellClasses } from '@mui/material/TableCell' +import { IconTrash } from '@tabler/icons-react' + +const StyledTableCell = styled(TableCell)(({ theme }) => ({ + borderColor: theme.palette.grey[900] + 25, + + [`&.${tableCellClasses.head}`]: { + color: theme.palette.grey[900] + }, + [`&.${tableCellClasses.body}`]: { + fontSize: 14, + height: 64 + } +})) + +const StyledTableRow = styled(TableRow)(() => ({ + // hide last border + '&:last-child td, &:last-child th': { + border: 0 + } +})) + +export const FilesTable = ({ data, isLoading, filterFunction, handleDelete }) => { + const theme = useTheme() + const customization = useSelector((state) => state.customization) + + return ( + <> + + + + + + Name + + + Path + + + Size + + + Actions + + + + + {isLoading ? ( + <> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ) : ( + <> + {data?.filter(filterFunction).map((row, index) => ( + + + + + {row.name.split('/').pop()} + + + + + + + {row.path} + + + + + + {`${row.size.toFixed(2)} MB`} + + + + handleDelete(row)} size='small'> + + + + + ))} + + )} + +
    +
    + + ) +} + +FilesTable.propTypes = { + data: PropTypes.array, + images: PropTypes.object, + isLoading: PropTypes.bool, + filterFunction: PropTypes.func, + handleDelete: PropTypes.func +} diff --git a/packages/ui/src/ui-component/table/FlowListTable.jsx b/packages/ui/src/ui-component/table/FlowListTable.jsx index 01ab92436..2d1e05704 100644 --- a/packages/ui/src/ui-component/table/FlowListTable.jsx +++ b/packages/ui/src/ui-component/table/FlowListTable.jsx @@ -23,6 +23,9 @@ import { import { tableCellClasses } from '@mui/material/TableCell' import FlowListMenu from '../button/FlowListMenu' import { Link } from 'react-router-dom' +import { useAuth } from '@/hooks/useAuth' + +import MoreItemsTooltip from '../tooltip/MoreItemsTooltip' const StyledTableCell = styled(TableCell)(({ theme }) => ({ borderColor: theme.palette.grey[900] + 25, @@ -47,7 +50,23 @@ const getLocalStorageKeyName = (name, isAgentCanvas) => { return (isAgentCanvas ? 'agentcanvas' : 'chatflowcanvas') + '_' + name } -export const FlowListTable = ({ data, images = {}, icons = {}, isLoading, filterFunction, updateFlowsApi, setError, isAgentCanvas }) => { +export const FlowListTable = ({ + data, + images = {}, + icons = {}, + isLoading, + filterFunction, + updateFlowsApi, + setError, + isAgentCanvas, + isAgentflowV2, + currentPage, + pageLimit +}) => { + const { hasPermission } = useAuth() + const isActionsAvailable = isAgentCanvas + ? hasPermission('agentflows:update,agentflows:delete,agentflows:config,agentflows:domains,templates:flowexport,agentflows:export') + : hasPermission('chatflows:update,chatflows:delete,chatflows:config,chatflows:domains,templates:flowexport,chatflows:export') const theme = useTheme() const customization = useSelector((state) => state.customization) @@ -70,7 +89,7 @@ export const FlowListTable = ({ data, images = {}, icons = {}, isLoading, filter if (!isAgentCanvas) { return `/canvas/${row.id}` } else { - return localStorage.getItem('agentFlowVersion') === 'v2' ? 
`/v2/agentcanvas/${row.id}` : `/agentcanvas/${row.id}` + return isAgentflowV2 ? `/v2/agentcanvas/${row.id}` : `/agentcanvas/${row.id}` } } @@ -118,9 +137,11 @@ export const FlowListTable = ({ data, images = {}, icons = {}, isLoading, filter Last Modified Date - - Actions - + {isActionsAvailable && ( + + Actions + + )} @@ -139,9 +160,11 @@ export const FlowListTable = ({ data, images = {}, icons = {}, isLoading, filter - - - + {isActionsAvailable && ( + + + + )} @@ -156,9 +179,11 @@ export const FlowListTable = ({ data, images = {}, icons = {}, isLoading, filter - - - + {isActionsAvailable && ( + + + + )} ) : ( @@ -213,64 +238,80 @@ export const FlowListTable = ({ data, images = {}, icons = {}, isLoading, filter }} > {[ - ...(images[row.id] || []).map((img) => ({ type: 'image', src: img })), + ...(images[row.id] || []).map((img) => ({ + type: 'image', + src: img.imageSrc, + label: img.label + })), ...(icons[row.id] || []).map((ic) => ({ type: 'icon', icon: ic.icon, - color: ic.color + color: ic.color, + title: ic.name })) ] .slice(0, 5) - .map((item, index) => - item.type === 'image' ? ( - - ( + + {item.type === 'image' ? ( + - - ) : ( -
    - -
    - ) - )} + > + +
    + ) : ( +
    + +
    + )} + + ))} + {(images[row.id]?.length || 0) + (icons[row.id]?.length || 0) > 5 && ( - ({ label: ic.name })) + ]} > - + {(images[row.id]?.length || 0) + (icons[row.id]?.length || 0) - 5} More - + + + {(images[row.id]?.length || 0) + (icons[row.id]?.length || 0) - 5} More + + )} )} @@ -278,21 +319,26 @@ export const FlowListTable = ({ data, images = {}, icons = {}, isLoading, filter {moment(row.updatedDate).format('MMMM Do, YYYY HH:mm:ss')} - - - - - + {isActionsAvailable && ( + + + + + + )} ))} @@ -312,5 +358,8 @@ FlowListTable.propTypes = { filterFunction: PropTypes.func, updateFlowsApi: PropTypes.object, setError: PropTypes.func, - isAgentCanvas: PropTypes.bool + isAgentCanvas: PropTypes.bool, + isAgentflowV2: PropTypes.bool, + currentPage: PropTypes.number, + pageLimit: PropTypes.number } diff --git a/packages/ui/src/ui-component/table/MarketplaceTable.jsx b/packages/ui/src/ui-component/table/MarketplaceTable.jsx index bc252f7c1..23a35c220 100644 --- a/packages/ui/src/ui-component/table/MarketplaceTable.jsx +++ b/packages/ui/src/ui-component/table/MarketplaceTable.jsx @@ -15,10 +15,10 @@ import { TableRow, Typography, Stack, - useTheme, - IconButton + useTheme } from '@mui/material' -import { IconTrash } from '@tabler/icons-react' +import { IconShare, IconTrash } from '@tabler/icons-react' +import { PermissionIconButton } from '@/ui-component/button/RBACButtons' const StyledTableCell = styled(TableCell)(({ theme }) => ({ borderColor: theme.palette.grey[900] + 25, @@ -49,7 +49,8 @@ export const MarketplaceTable = ({ goToCanvas, goToTool, isLoading, - onDelete + onDelete, + onShare }) => { const theme = useTheme() const customization = useSelector((state) => state.customization) @@ -86,15 +87,8 @@ export const MarketplaceTable = ({ Use cases - Nodes - -   - - {onDelete && ( - - Delete - - )} + Badges + @@ -122,11 +116,6 @@ export const MarketplaceTable = ({ - {onDelete && ( - - - - )} @@ -150,11 +139,6 @@ export const MarketplaceTable = ({ - {onDelete && ( - 
- - - )} ) : ( @@ -223,27 +207,19 @@ export const MarketplaceTable = ({ - - {row.categories && - row.categories.map((tag, index) => ( - - ))} - - - {row.badge && row.badge .split(';') .map((tag, index) => ( - {onDelete && ( - - onDelete(row)}> - - - - )} + + {row.shared ? ( + Shared Template + ) : ( + <> + {onShare && ( + onShare(row)} + > + + + )} + {onDelete && ( + onDelete(row)} + > + + + )} + + )} + ))} @@ -280,5 +278,6 @@ MarketplaceTable.propTypes = { goToTool: PropTypes.func, goToCanvas: PropTypes.func, isLoading: PropTypes.bool, - onDelete: PropTypes.func + onDelete: PropTypes.func, + onShare: PropTypes.func } diff --git a/packages/ui/src/ui-component/table/Table.jsx b/packages/ui/src/ui-component/table/Table.jsx index 57015070b..9189cc324 100644 --- a/packages/ui/src/ui-component/table/Table.jsx +++ b/packages/ui/src/ui-component/table/Table.jsx @@ -11,21 +11,30 @@ export const TableViewOnly = ({ columns, rows, sx }) => { return row[key] ? : } else if (key === 'type' && row.schema) { // If there's schema information, add a tooltip - const schemaContent = - '[
    ' + - row.schema - .map( - (item) => - `  ${JSON.stringify( - { - [item.name]: item.type - }, - null, - 2 - )}` - ) - .join(',
    ') + - '
    ]' + let schemaContent + if (Array.isArray(row.schema)) { + // Handle array format: [{ name: "field", type: "string" }, ...] + schemaContent = + '[
    ' + + row.schema + .map( + (item) => + `  ${JSON.stringify( + { + [item.name]: item.type + }, + null, + 2 + )}` + ) + .join(',
    ') + + '
    ]' + } else if (typeof row.schema === 'object' && row.schema !== null) { + // Handle object format: { "field": "string", "field2": "number", ... } + schemaContent = JSON.stringify(row.schema, null, 2).replace(/\n/g, '
    ').replace(/ /g, ' ') + } else { + schemaContent = 'No schema available' + } return ( diff --git a/packages/ui/src/ui-component/table/TableStyles.jsx b/packages/ui/src/ui-component/table/TableStyles.jsx new file mode 100644 index 000000000..45cee6a1d --- /dev/null +++ b/packages/ui/src/ui-component/table/TableStyles.jsx @@ -0,0 +1,22 @@ +import { styled } from '@mui/material/styles' +import { TableCell, TableRow } from '@mui/material' +import { tableCellClasses } from '@mui/material/TableCell' + +export const StyledTableCell = styled(TableCell)(({ theme }) => ({ + borderColor: theme.palette.grey[900] + 25, + + [`&.${tableCellClasses.head}`]: { + color: theme.palette.grey[900] + }, + [`&.${tableCellClasses.body}`]: { + fontSize: 14, + height: 64 + } +})) + +export const StyledTableRow = styled(TableRow)(() => ({ + // hide last border + '&:last-child td, &:last-child th': { + border: 0 + } +})) diff --git a/packages/ui/src/ui-component/tooltip/MoreItemsTooltip.jsx b/packages/ui/src/ui-component/tooltip/MoreItemsTooltip.jsx new file mode 100644 index 000000000..260240453 --- /dev/null +++ b/packages/ui/src/ui-component/tooltip/MoreItemsTooltip.jsx @@ -0,0 +1,40 @@ +import { Tooltip, Typography } from '@mui/material' +import { styled } from '@mui/material/styles' +import PropTypes from 'prop-types' + +const StyledOl = styled('ol')(() => ({ + paddingLeft: 20, + margin: 0 +})) + +const StyledLi = styled('li')(() => ({ + paddingBottom: 4 +})) + +const MoreItemsTooltip = ({ images, children }) => { + if (!images || images.length === 0) return children + + return ( + + {images.map((img) => ( + + {img.label} + + ))} + + } + placement='top' + > + {children} + + ) +} + +export default MoreItemsTooltip + +MoreItemsTooltip.propTypes = { + images: PropTypes.array, + children: PropTypes.node +} diff --git a/packages/ui/src/utils/authUtils.js b/packages/ui/src/utils/authUtils.js new file mode 100644 index 000000000..5d40a4802 --- /dev/null +++ 
b/packages/ui/src/utils/authUtils.js @@ -0,0 +1,81 @@ +const getCurrentUser = () => { + if (!localStorage.getItem('user') || localStorage.getItem('user') === 'undefined') return undefined + return JSON.parse(localStorage.getItem('user')) +} + +const updateCurrentUser = (user) => { + let stringifiedUser = user + if (typeof user === 'object') { + stringifiedUser = JSON.stringify(user) + } + localStorage.setItem('user', stringifiedUser) +} + +const removeCurrentUser = () => { + _removeFromStorage() + clearAllCookies() +} + +const _removeFromStorage = () => { + localStorage.removeItem('isAuthenticated') + localStorage.removeItem('isGlobal') + localStorage.removeItem('user') + localStorage.removeItem('permissions') + localStorage.removeItem('features') + localStorage.removeItem('isSSO') +} + +const clearAllCookies = () => { + document.cookie.split(';').forEach((cookie) => { + const name = cookie.split('=')[0].trim() + document.cookie = `${name}=; expires=Thu, 01 Jan 1970 00:00:00 UTC; path=/` + }) +} + +const extractUser = (payload) => { + const user = { + id: payload.id, + email: payload.email, + name: payload.name, + status: payload.status, + role: payload.role, + isSSO: payload.isSSO, + activeOrganizationId: payload.activeOrganizationId, + activeOrganizationSubscriptionId: payload.activeOrganizationSubscriptionId, + activeOrganizationCustomerId: payload.activeOrganizationCustomerId, + activeOrganizationProductId: payload.activeOrganizationProductId, + activeWorkspaceId: payload.activeWorkspaceId, + activeWorkspace: payload.activeWorkspace, + lastLogin: payload.lastLogin, + isOrganizationAdmin: payload.isOrganizationAdmin, + assignedWorkspaces: payload.assignedWorkspaces, + permissions: payload.permissions + } + return user +} + +const updateStateAndLocalStorage = (state, payload) => { + const user = extractUser(payload) + state.user = user + state.token = payload.token + state.permissions = payload.permissions + state.features = payload.features + 
state.isAuthenticated = true + state.isGlobal = user.isOrganizationAdmin + localStorage.setItem('isAuthenticated', 'true') + localStorage.setItem('isGlobal', state.isGlobal) + localStorage.setItem('isSSO', state.user.isSSO) + localStorage.setItem('user', JSON.stringify(user)) + localStorage.setItem('permissions', JSON.stringify(payload.permissions)) + localStorage.setItem('features', JSON.stringify(payload.features)) +} + +const AuthUtils = { + getCurrentUser, + updateCurrentUser, + removeCurrentUser, + updateStateAndLocalStorage, + extractUser +} + +export default AuthUtils diff --git a/packages/ui/src/utils/customMention.js b/packages/ui/src/utils/customMention.js new file mode 100644 index 000000000..f8a8df10e --- /dev/null +++ b/packages/ui/src/utils/customMention.js @@ -0,0 +1,26 @@ +import Mention from '@tiptap/extension-mention' +import { PasteRule } from '@tiptap/core' + +export const CustomMention = Mention.extend({ + renderText({ node }) { + return `{{${node.attrs.label ?? node.attrs.id}}}` + }, + addPasteRules() { + return [ + new PasteRule({ + find: /\{\{([^{}]+)\}\}/g, + handler: ({ match, chain, range }) => { + const label = match[1].trim() + if (label) { + chain() + .deleteRange(range) + .insertContentAt(range.from, { + type: this.name, + attrs: { id: label, label: label } + }) + } + } + }) + ] + } +}) diff --git a/packages/ui/src/utils/exportImport.js b/packages/ui/src/utils/exportImport.js index 1d8b3005f..a9361a7fa 100644 --- a/packages/ui/src/utils/exportImport.js +++ b/packages/ui/src/utils/exportImport.js @@ -66,6 +66,37 @@ const sanitizeAssistant = (Assistant) => { } } +const sanitizeCustomTemplate = (CustomTemplate) => { + try { + return CustomTemplate.map((customTemplate) => { + return { ...customTemplate, usecases: JSON.stringify(customTemplate.usecases), workspaceId: undefined } + }) + } catch (error) { + throw new Error(`exportImport.sanitizeCustomTemplate ${getErrorMessage(error)}`) + } +} + +const sanitizeDocumentStore = (DocumentStore) 
=> { + try { + return DocumentStore.map((documentStore) => { + return { ...documentStore, workspaceId: undefined } + }) + } catch (error) { + throw new Error(`exportImport.sanitizeDocumentStore ${getErrorMessage(error)}`) + } +} + +const sanitizeExecution = (Execution) => { + try { + return Execution.map((execution) => { + if (execution.agentflow) execution.agentflow.workspaceId = undefined + return { ...execution, workspaceId: undefined } + }) + } catch (error) { + throw new Error(`exportImport.sanitizeExecution ${getErrorMessage(error)}`) + } +} + export const stringify = (object) => { try { return JSON.stringify(object, null, 2) @@ -86,10 +117,10 @@ export const exportData = (exportAllData) => { ChatFlow: sanitizeChatflow(exportAllData.ChatFlow), ChatMessage: exportAllData.ChatMessage, ChatMessageFeedback: exportAllData.ChatMessageFeedback, - CustomTemplate: exportAllData.CustomTemplate, - DocumentStore: exportAllData.DocumentStore, + CustomTemplate: sanitizeCustomTemplate(exportAllData.CustomTemplate), + DocumentStore: sanitizeDocumentStore(exportAllData.DocumentStore), DocumentStoreFileChunk: exportAllData.DocumentStoreFileChunk, - Execution: exportAllData.Execution, + Execution: sanitizeExecution(exportAllData.Execution), Tool: sanitizeTool(exportAllData.Tool), Variable: sanitizeVariable(exportAllData.Variable) } diff --git a/packages/ui/src/utils/genericHelper.js b/packages/ui/src/utils/genericHelper.js index 97542114e..ac834c77f 100644 --- a/packages/ui/src/utils/genericHelper.js +++ b/packages/ui/src/utils/genericHelper.js @@ -244,6 +244,63 @@ export const updateOutdatedNodeData = (newComponentNodeData, existingComponentNo } } } + + // Handle loadConfig parameters - preserve configuration objects + if (existingComponentNodeData.inputs && initNewComponentNodeData.inputParams) { + // Find parameters with loadConfig: true + const loadConfigParams = initNewComponentNodeData.inputParams.filter((param) => param.loadConfig === true) + + for (const param of 
loadConfigParams) { + const configKey = `${param.name}Config` + + // Preserve top-level config objects (e.g., agentModelConfig) + if (existingComponentNodeData.inputs[configKey]) { + initNewComponentNodeData.inputs[configKey] = existingComponentNodeData.inputs[configKey] + } + } + + // Handle array parameters that might contain loadConfig items + const arrayParams = initNewComponentNodeData.inputParams.filter((param) => param.type === 'array' && param.array) + + for (const arrayParam of arrayParams) { + if (existingComponentNodeData.inputs[arrayParam.name] && Array.isArray(existingComponentNodeData.inputs[arrayParam.name])) { + const existingArray = existingComponentNodeData.inputs[arrayParam.name] + + // Find loadConfig parameters within the array definition + const arrayLoadConfigParams = arrayParam.array.filter((subParam) => subParam.loadConfig === true) + + if (arrayLoadConfigParams.length > 0) { + // Process each array item to preserve config objects + const updatedArray = existingArray.map((existingItem) => { + if (typeof existingItem === 'object' && existingItem !== null) { + const updatedItem = { ...existingItem } + + // Preserve config objects for each loadConfig parameter in the array + for (const loadConfigParam of arrayLoadConfigParams) { + const configKey = `${loadConfigParam.name}Config` + if (existingItem[configKey]) { + updatedItem[configKey] = existingItem[configKey] + } + } + + return updatedItem + } + return existingItem + }) + + initNewComponentNodeData.inputs[arrayParam.name] = updatedArray + } + } + } + + // Also preserve any config keys that exist in the existing data but might not be explicitly handled above + // This catches edge cases where config keys exist but don't follow the expected pattern + for (const key in existingComponentNodeData.inputs) { + if (key.endsWith('Config') && !initNewComponentNodeData.inputs[key]) { + initNewComponentNodeData.inputs[key] = existingComponentNodeData.inputs[key] + } + } + } // Check for tabs const 
inputParamsWithTabIdentifiers = initNewComponentNodeData.inputParams.filter((param) => param.tabIdentifier) || [] @@ -268,7 +325,7 @@ export const updateOutdatedNodeData = (newComponentNodeData, existingComponentNo initNewComponentNodeData.label = existingComponentNodeData.label } - // Special case for Condition node to update outputAnchors + // Special case for Sequential Condition node to update outputAnchors if (initNewComponentNodeData.name.includes('seqCondition')) { const options = existingComponentNodeData.outputAnchors[0].options || [] @@ -294,6 +351,22 @@ export const updateOutdatedNodeEdge = (newComponentNodeData, edges) => { const isAgentFlowV2 = newComponentNodeData.category === 'Agent Flows' + // Helper to compare handle/anchor IDs while ignoring trailing base-class/type suffixes + // Example: + // azureChatOpenAI_0-output-azureChatOpenAI-A|B|C vs azureChatOpenAI_0-output-azureChatOpenAI-A|B + // We compare by stripping the last "-..." segment if it contains pipes. + const handlesEqual = (a, b) => { + if (a === b) return true + const stripPipeSuffix = (s) => { + if (!s) return s + const lastDash = s.lastIndexOf('-') + if (lastDash === -1) return s + const suffix = s.substring(lastDash + 1) + return suffix.includes('|') ? 
s.substring(0, lastDash) : s + } + return stripPipeSuffix(a) === stripPipeSuffix(b) + } + for (const edge of edges) { const targetNodeId = edge.targetHandle.split('-')[0] const sourceNodeId = edge.sourceHandle.split('-')[0] @@ -305,8 +378,8 @@ export const updateOutdatedNodeEdge = (newComponentNodeData, edges) => { } } else { // Check if targetHandle is in inputParams or inputAnchors - const inputParam = newComponentNodeData.inputParams.find((param) => param.id === edge.targetHandle) - const inputAnchor = newComponentNodeData.inputAnchors.find((param) => param.id === edge.targetHandle) + const inputParam = newComponentNodeData.inputParams.find((param) => handlesEqual(param.id, edge.targetHandle)) + const inputAnchor = newComponentNodeData.inputAnchors.find((param) => handlesEqual(param.id, edge.targetHandle)) if (!inputParam && !inputAnchor) { removedEdges.push(edge) @@ -322,11 +395,11 @@ export const updateOutdatedNodeEdge = (newComponentNodeData, edges) => { for (const outputAnchor of newComponentNodeData.outputAnchors) { const outputAnchorType = outputAnchor.type if (outputAnchorType === 'options') { - if (!outputAnchor.options.find((outputOption) => outputOption.id === edge.sourceHandle)) { + if (!outputAnchor.options.find((outputOption) => handlesEqual(outputOption.id, edge.sourceHandle))) { removedEdges.push(edge) } } else { - if (outputAnchor.id !== edge.sourceHandle) { + if (!handlesEqual(outputAnchor.id, edge.sourceHandle)) { removedEdges.push(edge) } } @@ -982,6 +1055,18 @@ export const kFormatter = (num) => { return item ? (num / item.value).toFixed(1).replace(regexp, '').concat(item.symbol) : '0' } +export const redirectWhenUnauthorized = ({ error, redirectTo }) => { + if (error === 'unauthorized') { + window.location.href = redirectTo + } else if (error === 'subscription_canceled') { + window.location.href = `${redirectTo}?error=${error}` + } +} + +export const truncateString = (str, maxLength) => { + return str.length > maxLength ? 
`${str.slice(0, maxLength - 3)}...` : str +} + const toCamelCase = (str) => { return str .split(' ') // Split by space to process each word @@ -1106,42 +1191,87 @@ const _showHideOperation = (nodeData, inputParam, displayType, index) => { if (path.includes('$index')) { path = path.replace('$index', index) } - const groundValue = get(nodeData.inputs, path, '') + let groundValue = get(nodeData.inputs, path, '') + if (groundValue && typeof groundValue === 'string' && groundValue.startsWith('[') && groundValue.endsWith(']')) { + groundValue = JSON.parse(groundValue) + } - if (Array.isArray(comparisonValue)) { - if (displayType === 'show' && !comparisonValue.includes(groundValue)) { - inputParam.display = false + // Handle case where groundValue is an array + if (Array.isArray(groundValue)) { + if (Array.isArray(comparisonValue)) { + // Both are arrays - check if there's any intersection + const hasIntersection = comparisonValue.some((val) => groundValue.includes(val)) + if (displayType === 'show' && !hasIntersection) { + inputParam.display = false + } + if (displayType === 'hide' && hasIntersection) { + inputParam.display = false + } + } else if (typeof comparisonValue === 'string') { + // comparisonValue is string, groundValue is array - check if array contains the string + const matchFound = groundValue.some((val) => comparisonValue === val || new RegExp(comparisonValue).test(val)) + if (displayType === 'show' && !matchFound) { + inputParam.display = false + } + if (displayType === 'hide' && matchFound) { + inputParam.display = false + } + } else if (typeof comparisonValue === 'boolean' || typeof comparisonValue === 'number') { + // For boolean/number comparison with array, check if array contains the value + const matchFound = groundValue.includes(comparisonValue) + if (displayType === 'show' && !matchFound) { + inputParam.display = false + } + if (displayType === 'hide' && matchFound) { + inputParam.display = false + } + } else if (typeof comparisonValue === 
'object') { + // For object comparison with array, use deep equality check + const matchFound = groundValue.some((val) => isEqual(comparisonValue, val)) + if (displayType === 'show' && !matchFound) { + inputParam.display = false + } + if (displayType === 'hide' && matchFound) { + inputParam.display = false + } } - if (displayType === 'hide' && comparisonValue.includes(groundValue)) { - inputParam.display = false - } - } else if (typeof comparisonValue === 'string') { - if (displayType === 'show' && !(comparisonValue === groundValue || new RegExp(comparisonValue).test(groundValue))) { - inputParam.display = false - } - if (displayType === 'hide' && (comparisonValue === groundValue || new RegExp(comparisonValue).test(groundValue))) { - inputParam.display = false - } - } else if (typeof comparisonValue === 'boolean') { - if (displayType === 'show' && comparisonValue !== groundValue) { - inputParam.display = false - } - if (displayType === 'hide' && comparisonValue === groundValue) { - inputParam.display = false - } - } else if (typeof comparisonValue === 'object') { - if (displayType === 'show' && !isEqual(comparisonValue, groundValue)) { - inputParam.display = false - } - if (displayType === 'hide' && isEqual(comparisonValue, groundValue)) { - inputParam.display = false - } - } else if (typeof comparisonValue === 'number') { - if (displayType === 'show' && comparisonValue !== groundValue) { - inputParam.display = false - } - if (displayType === 'hide' && comparisonValue === groundValue) { - inputParam.display = false + } else { + // Original logic for non-array groundValue + if (Array.isArray(comparisonValue)) { + if (displayType === 'show' && !comparisonValue.includes(groundValue)) { + inputParam.display = false + } + if (displayType === 'hide' && comparisonValue.includes(groundValue)) { + inputParam.display = false + } + } else if (typeof comparisonValue === 'string') { + if (displayType === 'show' && !(comparisonValue === groundValue || new 
RegExp(comparisonValue).test(groundValue))) { + inputParam.display = false + } + if (displayType === 'hide' && (comparisonValue === groundValue || new RegExp(comparisonValue).test(groundValue))) { + inputParam.display = false + } + } else if (typeof comparisonValue === 'boolean') { + if (displayType === 'show' && comparisonValue !== groundValue) { + inputParam.display = false + } + if (displayType === 'hide' && comparisonValue === groundValue) { + inputParam.display = false + } + } else if (typeof comparisonValue === 'object') { + if (displayType === 'show' && !isEqual(comparisonValue, groundValue)) { + inputParam.display = false + } + if (displayType === 'hide' && isEqual(comparisonValue, groundValue)) { + inputParam.display = false + } + } else if (typeof comparisonValue === 'number') { + if (displayType === 'show' && comparisonValue !== groundValue) { + inputParam.display = false + } + if (displayType === 'hide' && comparisonValue === groundValue) { + inputParam.display = false + } } } }) diff --git a/packages/ui/src/utils/validation.js b/packages/ui/src/utils/validation.js new file mode 100644 index 000000000..077f4723d --- /dev/null +++ b/packages/ui/src/utils/validation.js @@ -0,0 +1,17 @@ +import { z } from 'zod' + +export const passwordSchema = z + .string() + .min(8, 'Password must be at least 8 characters') + .regex(/[a-z]/, 'Password must contain at least one lowercase letter') + .regex(/[A-Z]/, 'Password must contain at least one uppercase letter') + .regex(/\d/, 'Password must contain at least one digit') + .regex(/[^a-zA-Z0-9]/, 'Password must contain at least one special character') + +export const validatePassword = (password) => { + const result = passwordSchema.safeParse(password) + if (!result.success) { + return result.error.errors.map((err) => err.message) + } + return [] +} diff --git a/packages/ui/src/views/account/index.jsx b/packages/ui/src/views/account/index.jsx new file mode 100644 index 000000000..b96720773 --- /dev/null +++ 
b/packages/ui/src/views/account/index.jsx @@ -0,0 +1,1388 @@ +import { useEffect, useMemo, useState } from 'react' +import { useDispatch, useSelector } from 'react-redux' +import { useNavigate } from 'react-router-dom' + +// utils +import useNotifier from '@/utils/useNotifier' +import { validatePassword } from '@/utils/validation' + +// material-ui +import { + Box, + Button, + CircularProgress, + Dialog, + DialogActions, + DialogContent, + DialogTitle, + LinearProgress, + OutlinedInput, + Skeleton, + Stack, + TextField, + Typography +} from '@mui/material' +import { darken, useTheme } from '@mui/material/styles' + +// project imports +import ViewHeader from '@/layout/MainLayout/ViewHeader' +import { StyledButton } from '@/ui-component/button/StyledButton' +import MainCard from '@/ui-component/cards/MainCard' +import SettingsSection from '@/ui-component/form/settings' +import PricingDialog from '@/ui-component/subscription/PricingDialog' + +// Icons +import { IconAlertCircle, IconCreditCard, IconExternalLink, IconSparkles, IconX } from '@tabler/icons-react' + +// API +import accountApi from '@/api/account.api' +import pricingApi from '@/api/pricing' +import userApi from '@/api/user' + +// Hooks +import useApi from '@/hooks/useApi' + +// Store +import { store } from '@/store' +import { closeSnackbar as closeSnackbarAction, enqueueSnackbar as enqueueSnackbarAction } from '@/store/actions' +import { gridSpacing } from '@/store/constant' +import { useConfig } from '@/store/context/ConfigContext' +import { logoutSuccess, userProfileUpdated } from '@/store/reducers/authSlice' + +// ==============================|| ACCOUNT SETTINGS ||============================== // + +const calculatePercentage = (count, total) => { + return Math.min((count / total) * 100, 100) +} + +const AccountSettings = () => { + const theme = useTheme() + const dispatch = useDispatch() + useNotifier() + const navigate = useNavigate() + + const currentUser = useSelector((state) => state.auth.user) + 
const customization = useSelector((state) => state.customization) + + const { isCloud } = useConfig() + + const [isLoading, setLoading] = useState(true) + const [profileName, setProfileName] = useState('') + const [email, setEmail] = useState('') + const [oldPassword, setOldPassword] = useState('') + const [newPassword, setNewPassword] = useState('') + const [confirmPassword, setConfirmPassword] = useState('') + const [usage, setUsage] = useState(null) + const [isBillingLoading, setIsBillingLoading] = useState(false) + const [seatsQuantity, setSeatsQuantity] = useState(0) + const [prorationInfo, setProrationInfo] = useState(null) + const [isUpdatingSeats, setIsUpdatingSeats] = useState(false) + const [openPricingDialog, setOpenPricingDialog] = useState(false) + const [openRemoveSeatsDialog, setOpenRemoveSeatsDialog] = useState(false) + const [openAddSeatsDialog, setOpenAddSeatsDialog] = useState(false) + const [includedSeats, setIncludedSeats] = useState(0) + const [purchasedSeats, setPurchasedSeats] = useState(0) + const [occupiedSeats, setOccupiedSeats] = useState(0) + const [totalSeats, setTotalSeats] = useState(0) + + const predictionsUsageInPercent = useMemo(() => { + return usage ? calculatePercentage(usage.predictions?.usage, usage.predictions?.limit) : 0 + }, [usage]) + const storageUsageInPercent = useMemo(() => { + return usage ? 
calculatePercentage(usage.storage?.usage, usage.storage?.limit) : 0 + }, [usage]) + + const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) + const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) + + const getUserByIdApi = useApi(userApi.getUserById) + const getPricingPlansApi = useApi(pricingApi.getPricingPlans) + const getAdditionalSeatsQuantityApi = useApi(userApi.getAdditionalSeatsQuantity) + const getAdditionalSeatsProrationApi = useApi(userApi.getAdditionalSeatsProration) + const getCustomerDefaultSourceApi = useApi(userApi.getCustomerDefaultSource) + const updateAdditionalSeatsApi = useApi(userApi.updateAdditionalSeats) + const getCurrentUsageApi = useApi(userApi.getCurrentUsage) + const logoutApi = useApi(accountApi.logout) + + useEffect(() => { + if (currentUser) { + getUserByIdApi.request(currentUser.id) + } else { + window.location.href = '/login' + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [currentUser]) + + useEffect(() => { + if (isCloud) { + getPricingPlansApi.request() + getAdditionalSeatsQuantityApi.request(currentUser?.activeOrganizationSubscriptionId) + getCurrentUsageApi.request() + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [isCloud]) + + useEffect(() => { + setLoading(getUserByIdApi.loading) + }, [getUserByIdApi.loading]) + + useEffect(() => { + try { + if (getUserByIdApi.data) { + setProfileName(getUserByIdApi.data?.name || '') + setEmail(getUserByIdApi.data?.email || '') + } + } catch (e) { + console.error(e) + } + }, [getUserByIdApi.data]) + + useEffect(() => { + if (getCurrentUsageApi.data) { + setUsage(getCurrentUsageApi.data) + } + }, [getCurrentUsageApi.data]) + + useEffect(() => { + try { + if (logoutApi.data && logoutApi.data.message === 'logged_out') { + store.dispatch(logoutSuccess()) + window.location.href = logoutApi.data.redirectTo + } + } catch (e) { + console.error(e) + } + }, [logoutApi.data]) + + useEffect(() => { + if 
(openRemoveSeatsDialog || openAddSeatsDialog) { + setSeatsQuantity(0) + getCustomerDefaultSourceApi.request(currentUser?.activeOrganizationCustomerId) + } + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [openRemoveSeatsDialog, openAddSeatsDialog]) + + useEffect(() => { + if (getAdditionalSeatsProrationApi.data) { + setProrationInfo(getAdditionalSeatsProrationApi.data) + } + }, [getAdditionalSeatsProrationApi.data]) + + useEffect(() => { + if (!getAdditionalSeatsQuantityApi.loading && getAdditionalSeatsQuantityApi.data) { + const included = getAdditionalSeatsQuantityApi.data?.includedSeats || 1 + const purchased = getAdditionalSeatsQuantityApi.data?.quantity || 0 + const occupied = getAdditionalSeatsQuantityApi.data?.totalOrgUsers || 1 + + setIncludedSeats(included) + setPurchasedSeats(purchased) + setOccupiedSeats(occupied) + setTotalSeats(included + purchased) + } + }, [getAdditionalSeatsQuantityApi.data, getAdditionalSeatsQuantityApi.loading]) + + const currentPlanTitle = useMemo(() => { + if (!getPricingPlansApi.data) return '' + const currentPlan = getPricingPlansApi.data.find((plan) => plan.prodId === currentUser?.activeOrganizationProductId) + return currentPlan?.title || '' + }, [getPricingPlansApi.data, currentUser?.activeOrganizationProductId]) + + const handleBillingPortalClick = async () => { + setIsBillingLoading(true) + try { + const resp = await accountApi.getBillingData() + if (resp.data?.url) { + window.open(resp.data.url, '_blank') + } + } catch (error) { + enqueueSnackbar({ + message: 'Failed to access billing portal', + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + action: (key) => ( + + ) + } + }) + } finally { + setIsBillingLoading(false) + } + } + + const saveProfileData = async () => { + try { + const obj = { + id: currentUser.id, + name: profileName, + email: email + } + const saveProfileResp = await userApi.updateUser(obj) + if (saveProfileResp.data) { + 
store.dispatch(userProfileUpdated(saveProfileResp.data)) + enqueueSnackbar({ + message: 'Profile updated', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + } + } catch (error) { + enqueueSnackbar({ + message: `Failed to update profile: ${ + typeof error.response.data === 'object' ? error.response.data.message : error.response.data + }`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + } + } + + const savePassword = async () => { + try { + const validationErrors = [] + if (!oldPassword) { + validationErrors.push('Old Password cannot be left blank') + } + if (newPassword !== confirmPassword) { + validationErrors.push('New Password and Confirm Password do not match') + } + const passwordErrors = validatePassword(newPassword) + if (passwordErrors.length > 0) { + validationErrors.push(...passwordErrors) + } + if (validationErrors.length > 0) { + enqueueSnackbar({ + message: validationErrors.join(', '), + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + return + } + + const obj = { + id: currentUser.id, + oldPassword, + newPassword, + confirmPassword + } + const saveProfileResp = await userApi.updateUser(obj) + if (saveProfileResp.data) { + store.dispatch(userProfileUpdated(saveProfileResp.data)) + setOldPassword('') + setNewPassword('') + setConfirmPassword('') + await logoutApi.request() + enqueueSnackbar({ + message: 'Password updated', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + } + } catch (error) { + enqueueSnackbar({ + message: `Failed to update password: ${ + typeof error.response.data === 'object' ? 
error.response.data.message : error.response.data + }`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + } + } + + const handleSeatsModification = async (newSeatsAmount) => { + try { + setIsUpdatingSeats(true) + + if (!prorationInfo?.prorationDate) { + throw new Error('No proration date available') + } + + await updateAdditionalSeatsApi.request( + currentUser?.activeOrganizationSubscriptionId, + newSeatsAmount, + prorationInfo.prorationDate + ) + enqueueSnackbar({ + message: 'Seats updated successfully', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + // Refresh the seats quantity display + getAdditionalSeatsQuantityApi.request(currentUser?.activeOrganizationSubscriptionId) + } catch (error) { + console.error('Error updating seats:', error) + enqueueSnackbar({ + message: `Failed to update seats: ${ + typeof error.response.data === 'object' ? error.response.data.message : error.response.data + }`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + } finally { + setIsUpdatingSeats(false) + setProrationInfo(null) + setOpenAddSeatsDialog(false) + setOpenRemoveSeatsDialog(false) + setSeatsQuantity(0) + } + } + + const handleQuantityChange = (value, operation) => { + setSeatsQuantity(value) + // Calculate proration for the new quantity + const totalAdditionalSeats = operation === 'add' ? 
purchasedSeats + value : purchasedSeats - value + if (currentUser?.activeOrganizationSubscriptionId) { + getAdditionalSeatsProrationApi.request(currentUser.activeOrganizationSubscriptionId, totalAdditionalSeats) + } + } + + const handleRemoveSeatsDialogClose = () => { + if (!isUpdatingSeats) { + setProrationInfo(null) + setOpenRemoveSeatsDialog(false) + setSeatsQuantity(0) + } + } + + const handleAddSeatsDialogClose = () => { + if (!isUpdatingSeats) { + setProrationInfo(null) + setOpenAddSeatsDialog(false) + setSeatsQuantity(0) + } + } + + // Calculate empty seats + const emptySeats = Math.min(purchasedSeats, totalSeats - occupiedSeats) + + return ( + + + + {isLoading && !getUserByIdApi.data ? ( + + + + + + + + + + + + + + + + ) : ( + <> + {isCloud && ( + <> + + + + {currentPlanTitle && ( + + Current Organization Plan: + + {currentPlanTitle.toUpperCase()} + + + )} + + Update your billing details and subscription + + + + + + + + + + + + + Seats Included in Plan: + + {getAdditionalSeatsQuantityApi.loading ? : includedSeats} + + + + Additional Seats Purchased: + + {getAdditionalSeatsQuantityApi.loading ? ( + + ) : ( + purchasedSeats + )} + + + + Occupied Seats: + + {getAdditionalSeatsQuantityApi.loading ? 
( + + ) : ( + `${occupiedSeats}/${totalSeats}` + )} + + + + + {getAdditionalSeatsQuantityApi.data?.quantity > 0 && + currentPlanTitle.toUpperCase() === 'PRO' && ( + + )} + { + if (currentPlanTitle.toUpperCase() === 'PRO') { + setOpenAddSeatsDialog(true) + } else { + setOpenPricingDialog(true) + } + }} + title='Add Seats is available only for PRO plan' + sx={{ borderRadius: 2, height: 40 }} + > + Add Seats + + + + + + + + + Predictions + + {`${usage?.predictions?.usage || 0} / ${usage?.predictions?.limit || 0}`} + + + + + { + if (predictionsUsageInPercent > 90) return theme.palette.error.main + if (predictionsUsageInPercent > 75) return theme.palette.warning.main + if (predictionsUsageInPercent > 50) return theme.palette.success.light + return theme.palette.success.main + } + } + }} + value={predictionsUsageInPercent > 100 ? 100 : predictionsUsageInPercent} + variant='determinate' + /> + + {`${predictionsUsageInPercent.toFixed( + 2 + )}%`} + + + + + Storage + + {`${(usage?.storage?.usage || 0).toFixed(2)}MB / ${(usage?.storage?.limit || 0).toFixed( + 2 + )}MB`} + + + + + { + if (storageUsageInPercent > 90) return theme.palette.error.main + if (storageUsageInPercent > 75) return theme.palette.warning.main + if (storageUsageInPercent > 50) return theme.palette.success.light + return theme.palette.success.main + } + } + }} + value={storageUsageInPercent > 100 ? 
100 : storageUsageInPercent} + variant='determinate' + /> + + {`${storageUsageInPercent.toFixed( + 2 + )}%`} + + + + + + )} + + Save + + } + title='Profile' + > + + + Name + setProfileName(e.target.value)} + value={profileName} + /> + + + Email Address + setEmail(e.target.value)} + value={email} + /> + + + + {!currentUser.isSSO && ( + + Save + + } + title='Security' + > + + + Old Password + setOldPassword(e.target.value)} + value={oldPassword} + /> + + + New Password + setNewPassword(e.target.value)} + value={newPassword} + /> + + + Password must be at least 8 characters long and contain at least one lowercase letter, one + uppercase letter, one digit, and one special character. + + + + + Confirm New Password + setConfirmPassword(e.target.value)} + value={confirmPassword} + /> + + + + )} + + )} + + {openPricingDialog && isCloud && ( + { + setOpenPricingDialog(false) + if (planUpdated) { + navigate('/') + navigate(0) + } + }} + /> + )} + {/* Remove Seats Dialog */} + + Remove Additional Seats + + + {emptySeats === 0 ? ( + + + You must remove users from your organization before removing seats. + + ) : ( + + {/* Occupied Seats */} + + Occupied Seats + {occupiedSeats} + + + {/* Empty Seats */} + + Empty Seats + {emptySeats} + + + + Number of Empty Seats to Remove + { + const value = Math.max(0, Math.min(emptySeats, parseInt(e.target.value) || 0)) + handleQuantityChange(value, 'remove') + }} + onKeyDown={(e) => { + if (e.key === '-' || e.key === 'e') { + e.preventDefault() + } + }} + InputProps={{ + inputProps: { + min: 0, + max: emptySeats, + step: 1 + } + }} + sx={{ width: '70px' }} + disabled={!getCustomerDefaultSourceApi.data} + /> + + + {/* Total Seats */} + + New Total Seats + {totalSeats - seatsQuantity} + + + )} + + {getAdditionalSeatsProrationApi.loading && ( + + + + )} + + {getCustomerDefaultSourceApi.loading ? ( + + ) : getCustomerDefaultSourceApi.data?.invoice_settings?.default_payment_method ? 
( + + Payment Method + + {getCustomerDefaultSourceApi.data.invoice_settings.default_payment_method.card && ( + <> + + + + {getCustomerDefaultSourceApi.data.invoice_settings.default_payment_method.card.brand} + + + โ€ขโ€ขโ€ขโ€ข{' '} + {getCustomerDefaultSourceApi.data.invoice_settings.default_payment_method.card.last4} + + + (expires{' '} + { + getCustomerDefaultSourceApi.data.invoice_settings.default_payment_method.card + .exp_month + } + / + {getCustomerDefaultSourceApi.data.invoice_settings.default_payment_method.card.exp_year} + ) + + + + )} + + + ) : ( + + + + No payment method found + + + + )} + + {/* Proration info */} + {prorationInfo && ( + + {/* Date Range */} + + {new Date(prorationInfo.currentPeriodStart * 1000).toLocaleDateString('en-US', { + month: 'short', + day: 'numeric' + })}{' '} + -{' '} + {new Date(prorationInfo.currentPeriodEnd * 1000).toLocaleDateString('en-US', { + month: 'short', + day: 'numeric', + year: 'numeric' + })} + + + {/* Base Plan */} + + {currentPlanTitle} + + {prorationInfo.currency} {Math.max(0, prorationInfo.basePlanAmount).toFixed(2)} + + + + {/* Additional Seats */} + + + Additional Seats Left (Prorated) + + Qty {purchasedSeats - seatsQuantity} + + + + + {prorationInfo.currency} {Math.max(0, prorationInfo.additionalSeatsProratedAmount).toFixed(2)} + + + {prorationInfo.currency} {prorationInfo.seatPerUnitPrice.toFixed(2)} each + + + + + {prorationInfo.prorationAmount < 0 && ( + + Credit balance + + {prorationInfo.currency} {prorationInfo.prorationAmount < 0 ? '+' : ''} + {Math.abs(prorationInfo.prorationAmount).toFixed(2)} + + + )} + + {/* Next Payment */} + + Due today + + {prorationInfo.currency} {Math.max(0, prorationInfo.prorationAmount).toFixed(2)} + + + + {prorationInfo.prorationAmount < 0 && ( + + Your available credit will automatically apply to your next invoice. 
+ + )} + + )} + + + {getCustomerDefaultSourceApi.data?.invoice_settings?.default_payment_method && ( + + + + + )} + + {/* Add Seats Dialog */} + + Add Additional Seats + + + + {/* Occupied Seats */} + + Occupied Seats + {occupiedSeats} + + + {/* Included Seats */} + + Seats Included with Plan + {includedSeats} + + + {/* Additional Seats */} + + Additional Seats Purchased + {purchasedSeats} + + + + Number of Additional Seats to Add + { + const value = Math.max(0, parseInt(e.target.value) || 0) + handleQuantityChange(value, 'add') + }} + onKeyDown={(e) => { + if (e.key === '-' || e.key === 'e') { + e.preventDefault() + } + }} + InputProps={{ + inputProps: { + min: 0 + } + }} + sx={{ width: '70px' }} + disabled={!getCustomerDefaultSourceApi.data} + /> + + + {/* Total Seats */} + + New Total Seats + {totalSeats + seatsQuantity} + + + + {getAdditionalSeatsProrationApi.loading && ( + + + + )} + + {getCustomerDefaultSourceApi.loading ? ( + + ) : getCustomerDefaultSourceApi.data?.invoice_settings?.default_payment_method ? 
( + + Payment Method + + {getCustomerDefaultSourceApi.data.invoice_settings.default_payment_method.card && ( + <> + + + + {getCustomerDefaultSourceApi.data.invoice_settings.default_payment_method.card.brand} + + + โ€ขโ€ขโ€ขโ€ข{' '} + {getCustomerDefaultSourceApi.data.invoice_settings.default_payment_method.card.last4} + + + (expires{' '} + { + getCustomerDefaultSourceApi.data.invoice_settings.default_payment_method.card + .exp_month + } + / + {getCustomerDefaultSourceApi.data.invoice_settings.default_payment_method.card.exp_year} + ) + + + + )} + + + ) : ( + + + + No payment method found + + + + )} + + {/* Proration info */} + {prorationInfo && ( + + {/* Date Range */} + + {new Date(prorationInfo.currentPeriodStart * 1000).toLocaleDateString('en-US', { + month: 'short', + day: 'numeric' + })}{' '} + -{' '} + {new Date(prorationInfo.currentPeriodEnd * 1000).toLocaleDateString('en-US', { + month: 'short', + day: 'numeric', + year: 'numeric' + })} + + + {/* Base Plan */} + + {currentPlanTitle} + + {prorationInfo.currency} {prorationInfo.basePlanAmount.toFixed(2)} + + + + {/* Additional Seats */} + + + Additional Seats (Prorated) + + Qty {seatsQuantity + purchasedSeats} + + + + + {prorationInfo.currency} {prorationInfo.additionalSeatsProratedAmount.toFixed(2)} + + + {prorationInfo.currency} {prorationInfo.seatPerUnitPrice.toFixed(2)} each + + + + + {/* Credit Balance */} + {prorationInfo.creditBalance !== 0 && ( + + Applied account balance + + {prorationInfo.currency} {prorationInfo.creditBalance.toFixed(2)} + + + )} + + {/* Next Payment */} + + Due today + + {prorationInfo.currency}{' '} + {Math.max(0, prorationInfo.prorationAmount + prorationInfo.creditBalance).toFixed(2)} + + + + {prorationInfo.prorationAmount === 0 && prorationInfo.creditBalance < 0 && ( + + Your available credit will automatically apply to your next invoice. 
+ + )} + + )} + + + {getCustomerDefaultSourceApi.data?.invoice_settings?.default_payment_method && ( + + + + + )} + + + ) +} + +export default AccountSettings diff --git a/packages/ui/src/views/agentexecutions/ExecutionDetails.jsx b/packages/ui/src/views/agentexecutions/ExecutionDetails.jsx index 21e81d64c..c89a1130f 100644 --- a/packages/ui/src/views/agentexecutions/ExecutionDetails.jsx +++ b/packages/ui/src/views/agentexecutions/ExecutionDetails.jsx @@ -725,7 +725,8 @@ export const ExecutionDetails = ({ open, isPublic, execution, metadata, onClose, flex: '1 1 35%', padding: 2, borderRight: 1, - borderColor: 'divider' + borderColor: 'divider', + overflow: 'auto' }} > } variant='outlined' - label={metadata?.agentflow?.name || metadata?.agentflow?.id || 'Go to AgentFlow'} + label={localMetadata?.agentflow?.name || localMetadata?.agentflow?.id || 'Go to AgentFlow'} className={'button'} - onClick={() => window.open(`/v2/agentcanvas/${metadata?.agentflow?.id}`, '_blank')} + onClick={() => window.open(`/v2/agentcanvas/${localMetadata?.agentflow?.id}`, '_blank')} /> )} diff --git a/packages/ui/src/views/agentexecutions/NodeExecutionDetails.jsx b/packages/ui/src/views/agentexecutions/NodeExecutionDetails.jsx index 5a5296912..4b750d8fa 100644 --- a/packages/ui/src/views/agentexecutions/NodeExecutionDetails.jsx +++ b/packages/ui/src/views/agentexecutions/NodeExecutionDetails.jsx @@ -1,6 +1,7 @@ import { useState } from 'react' import { useSelector } from 'react-redux' import PropTypes from 'prop-types' +import axios from 'axios' // MUI import { @@ -24,15 +25,17 @@ import { } from '@mui/material' import { useTheme, darken } from '@mui/material/styles' import { useSnackbar } from 'notistack' -import { IconCoins, IconClock, IconChevronDown } from '@tabler/icons-react' +import { IconCoins, IconClock, IconChevronDown, IconDownload, IconTool } from '@tabler/icons-react' import toolSVG from '@/assets/images/tool.svg' // Project imports import { MemoizedReactMarkdown } from 
'@/ui-component/markdown/MemoizedReactMarkdown' +import { SafeHTML } from '@/ui-component/safe/SafeHTML' import { AGENTFLOW_ICONS, baseURL } from '@/store/constant' import { JSONViewer } from '@/ui-component/json/JsonViewer' import ReactJson from 'flowise-react-json-view' import { CodeEditor } from '@/ui-component/editor/CodeEditor' +import SourceDocDialog from '@/ui-component/dialog/SourceDocDialog' import predictionApi from '@/api/prediction' @@ -43,6 +46,8 @@ export const NodeExecutionDetails = ({ data, label, status, metadata, isPublic, const [feedbackType, setFeedbackType] = useState('') const [isLoading, setIsLoading] = useState(false) const [loadingMessage, setLoadingMessage] = useState('') + const [sourceDialogOpen, setSourceDialogOpen] = useState(false) + const [sourceDialogProps, setSourceDialogProps] = useState({}) const customization = useSelector((state) => state.customization) const theme = useTheme() const { enqueueSnackbar } = useSnackbar() @@ -159,6 +164,11 @@ export const NodeExecutionDetails = ({ data, label, status, metadata, isPublic, } } + const onUsedToolClick = (data, title) => { + setSourceDialogProps({ data, title }) + setSourceDialogOpen(true) + } + const handleSubmitFeedback = () => { onSubmitResponse(feedbackType, feedback) setOpenFeedbackDialog(false) @@ -166,6 +176,26 @@ export const NodeExecutionDetails = ({ data, label, status, metadata, isPublic, setFeedbackType('') } + const downloadFile = async (fileAnnotation) => { + try { + const response = await axios.post( + `${baseURL}/api/v1/openai-assistants-file/download`, + { fileName: fileAnnotation.fileName, chatflowId: metadata?.agentflowId, chatId: metadata?.sessionId }, + { responseType: 'blob' } + ) + const blob = new Blob([response.data], { type: response.headers['content-type'] }) + const downloadUrl = window.URL.createObjectURL(blob) + const link = document.createElement('a') + link.href = downloadUrl + link.download = fileAnnotation.fileName + document.body.appendChild(link) + 
link.click() + link.remove() + } catch (error) { + console.error('Download failed:', error) + } + } + const renderFullfilledConditions = (conditions) => { const fullfilledConditions = conditions.filter((condition) => condition.isFulfilled) return fullfilledConditions.map((condition, index) => { @@ -660,6 +690,35 @@ export const NodeExecutionDetails = ({ data, label, status, metadata, isPublic, )} + {message.additional_kwargs?.usedTools && message.additional_kwargs.usedTools.length > 0 && ( +
    + {message.additional_kwargs.usedTools.map((tool, index) => { + return tool ? ( + } + onClick={() => onUsedToolClick(tool, 'Used Tools')} + /> + ) : null + })} +
    + )} {message.additional_kwargs?.artifacts && message.additional_kwargs.artifacts.length > 0 && ( @@ -690,7 +749,7 @@ export const NodeExecutionDetails = ({ data, label, status, metadata, isPublic, )}` : artifact.data } - sx={{ height: 'auto', maxHeight: '500px' }} + sx={{ height: 'auto', maxHeight: '500px', objectFit: 'contain' }} alt={`artifact-${artifactIndex}`} /> @@ -708,7 +767,7 @@ export const NodeExecutionDetails = ({ data, label, status, metadata, isPublic, backgroundColor: theme.palette.background.paper }} > -
    +
    ) } else { @@ -796,6 +855,36 @@ export const NodeExecutionDetails = ({ data, label, status, metadata, isPublic, return {`*No data*`} } })()} + {message.additional_kwargs?.fileAnnotations && message.additional_kwargs.fileAnnotations.length > 0 && ( +
    + {message.additional_kwargs.fileAnnotations.map((fileAnnotation, index) => { + return ( + + ) + })} +
    + )}
    )) ) : data?.input?.form || data?.input?.http || data?.input?.conditions ? ( @@ -861,6 +950,106 @@ export const NodeExecutionDetails = ({ data, label, status, metadata, isPublic, backgroundColor: theme.palette.background.default }} > + {data.output?.usedTools && data.output.usedTools.length > 0 && ( +
    + {data.output.usedTools.map((tool, index) => { + return tool ? ( + } + onClick={() => onUsedToolClick(tool, 'Used Tools')} + /> + ) : null + })} +
    + )} + {data.output?.artifacts && data.output.artifacts.length > 0 && ( + + + {data.output.artifacts.map((artifact, artifactIndex) => { + if (artifact.type === 'png' || artifact.type === 'jpeg' || artifact.type === 'jpg') { + return ( + + + + ) + } else if (artifact.type === 'html') { + return ( + + + + ) + } else { + return ( + + {artifact.data} + + ) + } + })} + + + )} {(() => { // Check if the content is a stringified JSON or array if (data?.output?.content) { @@ -881,6 +1070,36 @@ export const NodeExecutionDetails = ({ data, label, status, metadata, isPublic, return {`*No data*`} } })()} + {data.output?.fileAnnotations && data.output.fileAnnotations.length > 0 && ( +
    + {data.output.fileAnnotations.map((fileAnnotation, index) => { + return ( + + ) + })} +
    + )} )} {data.error && ( @@ -1019,6 +1238,7 @@ export const NodeExecutionDetails = ({ data, label, status, metadata, isPublic, )} + setSourceDialogOpen(false)} /> ) } diff --git a/packages/ui/src/views/agentexecutions/PublicExecutionDetails.jsx b/packages/ui/src/views/agentexecutions/PublicExecutionDetails.jsx index a0c9fa1eb..ed650ac29 100644 --- a/packages/ui/src/views/agentexecutions/PublicExecutionDetails.jsx +++ b/packages/ui/src/views/agentexecutions/PublicExecutionDetails.jsx @@ -10,7 +10,7 @@ import executionsApi from '@/api/executions' import useApi from '@/hooks/useApi' // MUI -import { Box, Card, Stack, Typography, useTheme } from '@mui/material' +import { Box, Card, Stack, Typography, useTheme, CircularProgress } from '@mui/material' import { IconCircleXFilled } from '@tabler/icons-react' import { alpha } from '@mui/material/styles' @@ -38,8 +38,16 @@ const PublicExecutionDetails = () => { const executionDetails = typeof execution.executionData === 'string' ? JSON.parse(execution.executionData) : execution.executionData setExecution(executionDetails) - setSelectedMetadata(omit(execution, ['executionData'])) + const newMetadata = { + ...omit(execution, ['executionData']), + agentflow: { + ...selectedMetadata.agentflow + } + } + setSelectedMetadata(newMetadata) } + + // eslint-disable-next-line react-hooks/exhaustive-deps }, [getExecutionByIdPublicApi.data]) useEffect(() => { @@ -48,9 +56,13 @@ const PublicExecutionDetails = () => { return ( <> - {!isLoading ? ( + {isLoading ? ( + + + + ) : ( <> - {!execution || getExecutionByIdPublicApi.error ? ( + {getExecutionByIdPublicApi.error ? 
( { /> )} - ) : null} + )} ) } diff --git a/packages/ui/src/views/agentexecutions/index.jsx b/packages/ui/src/views/agentexecutions/index.jsx index 90d8600c2..0f3663cd1 100644 --- a/packages/ui/src/views/agentexecutions/index.jsx +++ b/packages/ui/src/views/agentexecutions/index.jsx @@ -4,7 +4,6 @@ import 'react-datepicker/dist/react-datepicker.css' // material-ui import { - Pagination, Box, Stack, TextField, @@ -21,7 +20,6 @@ import { DialogTitle, IconButton, Tooltip, - Typography, useTheme } from '@mui/material' @@ -29,6 +27,7 @@ import { import MainCard from '@/ui-component/cards/MainCard' import ErrorBoundary from '@/ErrorBoundary' import ViewHeader from '@/layout/MainLayout/ViewHeader' +import { Available } from '@/ui-component/rbac/available' // API import useApi from '@/hooks/useApi' @@ -43,6 +42,7 @@ import { IconTrash } from '@tabler/icons-react' import { ExecutionsListTable } from '@/ui-component/table/ExecutionsListTable' import { ExecutionDetails } from './ExecutionDetails' import { omit } from 'lodash' +import TablePagination, { DEFAULT_ITEMS_PER_PAGE } from '@/ui-component/pagination/TablePagination' // ==============================|| AGENT EXECUTIONS ||============================== // @@ -68,13 +68,9 @@ const AgentExecutions = () => { startDate: null, endDate: null, agentflowId: '', + agentflowName: '', sessionId: '' }) - const [pagination, setPagination] = useState({ - page: 1, - limit: 10, - total: 0 - }) const handleFilterChange = (field, value) => { setFilters({ @@ -93,26 +89,25 @@ const AgentExecutions = () => { }) } - const handlePageChange = (event, newPage) => { - setPagination({ - ...pagination, - page: newPage - }) + /* Table Pagination */ + const [currentPage, setCurrentPage] = useState(1) + const [pageLimit, setPageLimit] = useState(DEFAULT_ITEMS_PER_PAGE) + const [total, setTotal] = useState(0) + const onChange = (page, pageLimit) => { + setCurrentPage(page) + setPageLimit(pageLimit) + applyFilters(page, pageLimit) } - const 
handleLimitChange = (event) => { - setPagination({ - ...pagination, - page: 1, // Reset to first page when changing items per page - limit: parseInt(event.target.value, 10) - }) - } - - const applyFilters = () => { + const applyFilters = (page, limit) => { setLoading(true) + // Ensure page and limit are numbers, not objects + const pageNum = typeof page === 'number' ? page : currentPage + const limitNum = typeof limit === 'number' ? limit : pageLimit + const params = { - page: pagination.page, - limit: pagination.limit + page: pageNum, + limit: limitNum } if (filters.state) params.state = filters.state @@ -138,6 +133,7 @@ const AgentExecutions = () => { } if (filters.agentflowId) params.agentflowId = filters.agentflowId + if (filters.agentflowName) params.agentflowName = filters.agentflowName if (filters.sessionId) params.sessionId = filters.sessionId getAllExecutions.request(params) @@ -149,9 +145,11 @@ const AgentExecutions = () => { startDate: null, endDate: null, agentflowId: '', + agentflowName: '', sessionId: '' }) - getAllExecutions.request() + setCurrentPage(1) + getAllExecutions.request({ page: 1, limit: pageLimit }) } const handleExecutionSelectionChange = (selectedIds) => { @@ -174,7 +172,7 @@ const AgentExecutions = () => { } useEffect(() => { - getAllExecutions.request() + getAllExecutions.request({ page: 1, limit: DEFAULT_ITEMS_PER_PAGE }) // eslint-disable-next-line react-hooks/exhaustive-deps }, []) @@ -185,7 +183,7 @@ const AgentExecutions = () => { const { data, total } = getAllExecutions.data if (!Array.isArray(data)) return setExecutions(data) - setPagination((prev) => ({ ...prev, total })) + setTotal(total) } catch (e) { console.error(e) } @@ -200,17 +198,12 @@ const AgentExecutions = () => { setError(getAllExecutions.error) }, [getAllExecutions.error]) - useEffect(() => { - applyFilters() - // eslint-disable-next-line react-hooks/exhaustive-deps - }, [pagination.page, pagination.limit]) - useEffect(() => { if (deleteExecutionsApi.data) { // 
Refresh the executions list getAllExecutions.request({ - page: pagination.page, - limit: pagination.limit + page: currentPage, + limit: pageLimit }) setSelectedExecutionIds([]) } @@ -224,8 +217,16 @@ const AgentExecutions = () => { const executionDetails = typeof execution.executionData === 'string' ? JSON.parse(execution.executionData) : execution.executionData setSelectedExecutionData(executionDetails) - setSelectedMetadata(omit(execution, ['executionData'])) + const newMetadata = { + ...omit(execution, ['executionData']), + agentflow: { + ...selectedMetadata.agentflow + } + } + setSelectedMetadata(newMetadata) } + + // eslint-disable-next-line react-hooks/exhaustive-deps }, [getExecutionByIdApi.data]) return ( @@ -314,6 +315,20 @@ const AgentExecutions = () => { } /> + + handleFilterChange('agentflowName', e.target.value)} + size='small' + sx={{ + '& .MuiOutlinedInput-notchedOutline': { + borderColor: borderColor + } + }} + /> + { }} /> - + - - - - - - - - + + + + + + + + + - { - setOpenDrawer(true) - const executionDetails = - typeof execution.executionData === 'string' ? JSON.parse(execution.executionData) : execution.executionData - setSelectedExecutionData(executionDetails) - setSelectedMetadata(omit(execution, ['executionData'])) - }} - /> - - {/* Pagination and Page Size Controls */} - - - Items per page: - 0 && ( + <> + { + setOpenDrawer(true) + const executionDetails = + typeof execution.executionData === 'string' + ? 
JSON.parse(execution.executionData) + : execution.executionData + setSelectedExecutionData(executionDetails) + setSelectedMetadata(omit(execution, ['executionData'])) }} - > - - - - - + /> - setOpenDrawer(false)} - onProceedSuccess={() => { - setOpenDrawer(false) - getAllExecutions.request() - }} - onUpdateSharing={() => { - getAllExecutions.request() - }} - onRefresh={(executionId) => { - getAllExecutions.request() - getExecutionByIdApi.request(executionId) - }} - /> + {/* Pagination and Page Size Controls */} + {!isLoading && total > 0 && ( + + )} + + setOpenDrawer(false)} + onProceedSuccess={() => { + setOpenDrawer(false) + getAllExecutions.request() + }} + onUpdateSharing={() => { + getAllExecutions.request() + }} + onRefresh={(executionId) => { + getAllExecutions.request() + getExecutionByIdApi.request(executionId) + }} + /> + + )} {/* Delete Confirmation Dialog */} { const navigate = useNavigate() const theme = useTheme() + const customization = useSelector((state) => state.customization) const [isLoading, setLoading] = useState(true) - const [error, setError] = useState(null) const [images, setImages] = useState({}) const [icons, setIcons] = useState({}) const [search, setSearch] = useState('') - const [loginDialogOpen, setLoginDialogOpen] = useState(false) - const [loginDialogProps, setLoginDialogProps] = useState({}) + const { error, setError } = useError() const getAllAgentflows = useApi(chatflowsApi.getAllAgentflows) const [view, setView] = useState(localStorage.getItem('flowDisplayStyle') || 'card') const [agentflowVersion, setAgentflowVersion] = useState(localStorage.getItem('agentFlowVersion') || 'v2') + const [showDeprecationNotice, setShowDeprecationNotice] = useState(true) + + /* Table Pagination */ + const [currentPage, setCurrentPage] = useState(1) + const [pageLimit, setPageLimit] = useState(DEFAULT_ITEMS_PER_PAGE) + const [total, setTotal] = useState(0) + + const onChange = (page, pageLimit) => { + setCurrentPage(page) + setPageLimit(pageLimit) 
+ refresh(page, pageLimit, agentflowVersion) + } + + const refresh = (page, limit, nextView) => { + const params = { + page: page || currentPage, + limit: limit || pageLimit + } + getAllAgentflows.request(nextView === 'v2' ? 'AGENTFLOW' : 'MULTIAGENT', params) + } const handleChange = (event, nextView) => { if (nextView === null) return @@ -57,7 +78,7 @@ const Agentflows = () => { if (nextView === null) return localStorage.setItem('agentFlowVersion', nextView) setAgentflowVersion(nextView) - getAllAgentflows.request(nextView === 'v2' ? 'AGENTFLOW' : 'MULTIAGENT') + refresh(1, pageLimit, nextView) } const onSearchChange = (event) => { @@ -72,12 +93,6 @@ const Agentflows = () => { ) } - const onLoginClick = (username, password) => { - localStorage.setItem('username', username) - localStorage.setItem('password', password) - navigate(0) - } - const addNew = () => { if (agentflowVersion === 'v2') { navigate('/v2/agentcanvas') @@ -94,24 +109,22 @@ const Agentflows = () => { } } + const handleDismissDeprecationNotice = () => { + setShowDeprecationNotice(false) + } + useEffect(() => { - getAllAgentflows.request(agentflowVersion === 'v2' ? 
'AGENTFLOW' : 'MULTIAGENT') + refresh(currentPage, pageLimit, agentflowVersion) // eslint-disable-next-line react-hooks/exhaustive-deps }, []) useEffect(() => { if (getAllAgentflows.error) { - if (getAllAgentflows.error?.response?.status === 401) { - setLoginDialogProps({ - title: 'Login', - confirmButtonName: 'Login' - }) - setLoginDialogOpen(true) - } else { - setError(getAllAgentflows.error) - } + setError(getAllAgentflows.error) } + + // eslint-disable-next-line react-hooks/exhaustive-deps }, [getAllAgentflows.error]) useEffect(() => { @@ -121,7 +134,8 @@ const Agentflows = () => { useEffect(() => { if (getAllAgentflows.data) { try { - const agentflows = getAllAgentflows.data + const agentflows = getAllAgentflows.data?.data + setTotal(getAllAgentflows.data?.total) const images = {} const icons = {} for (let i = 0; i < agentflows.length; i += 1) { @@ -131,13 +145,17 @@ const Agentflows = () => { images[agentflows[i].id] = [] icons[agentflows[i].id] = [] for (let j = 0; j < nodes.length; j += 1) { + if (nodes[j].data.name === 'stickyNote' || nodes[j].data.name === 'stickyNoteAgentflow') continue const foundIcon = AGENTFLOW_ICONS.find((icon) => icon.name === nodes[j].data.name) if (foundIcon) { icons[agentflows[i].id].push(foundIcon) } else { const imageSrc = `${baseURL}/api/v1/node-icon/${nodes[j].data.name}` - if (!images[agentflows[i].id].includes(imageSrc)) { - images[agentflows[i].id].push(imageSrc) + if (!images[agentflows[i].id].some((img) => img.imageSrc === imageSrc)) { + images[agentflows[i].id].push({ + imageSrc, + label: nodes[j].data.label + }) } } } @@ -174,7 +192,7 @@ const Agentflows = () => { sx={{ borderColor: theme.palette.grey[900] + 25, borderRadius: 2, - color: theme?.customization?.isDarkMode ? 'white' : 'inherit' + color: customization.isDarkMode ? 
'white' : 'inherit' }} variant='contained' value='v2' @@ -187,7 +205,7 @@ const Agentflows = () => { sx={{ borderColor: theme.palette.grey[900] + 25, borderRadius: 2, - color: theme?.customization?.isDarkMode ? 'white' : 'inherit' + color: customization.isDarkMode ? 'white' : 'inherit' }} variant='contained' value='v1' @@ -199,6 +217,7 @@ const Agentflows = () => { { sx={{ borderColor: theme.palette.grey[900] + 25, borderRadius: 2, - color: theme?.customization?.isDarkMode ? 'white' : 'inherit' + color: customization.isDarkMode ? 'white' : 'inherit' }} variant='contained' value='card' @@ -219,7 +238,7 @@ const Agentflows = () => { sx={{ borderColor: theme.palette.grey[900] + 25, borderRadius: 2, - color: theme?.customization?.isDarkMode ? 'white' : 'inherit' + color: customization.isDarkMode ? 'white' : 'inherit' }} variant='contained' value='list' @@ -228,21 +247,64 @@ const Agentflows = () => { - } sx={{ borderRadius: 2, height: 40 }}> + } + sx={{ borderRadius: 2, height: 40 }} + > Add New - + - {!view || view === 'card' ? ( + + {/* Deprecation Notice For V1 */} + {agentflowVersion === 'v1' && showDeprecationNotice && ( + + + + V1 Agentflows are deprecated. We recommend migrating to V2 for improved performance and + continued support. + + + + + + )} + {!isLoading && total > 0 && ( <> - {isLoading && !getAllAgentflows.data ? ( + {!view || view === 'card' ? 
( - - - - - ) : ( - - {getAllAgentflows.data?.filter(filterFlows).map((data, index) => ( + {getAllAgentflows.data?.data.filter(filterFlows).map((data, index) => ( goToCanvas(data)} @@ -252,21 +314,27 @@ const Agentflows = () => { /> ))} + ) : ( + )} + {/* Pagination and Page Size Controls */} + - ) : ( - )} - {!isLoading && (!getAllAgentflows.data || getAllAgentflows.data.length === 0) && ( + + {!isLoading && total === 0 && ( { )} )} - - ) diff --git a/packages/ui/src/views/agentflowsv2/AgentFlowNode.jsx b/packages/ui/src/views/agentflowsv2/AgentFlowNode.jsx index c5a71913a..72c4e9002 100644 --- a/packages/ui/src/views/agentflowsv2/AgentFlowNode.jsx +++ b/packages/ui/src/views/agentflowsv2/AgentFlowNode.jsx @@ -20,7 +20,13 @@ import { IconCopy, IconTrash, IconInfoCircle, - IconLoader + IconLoader, + IconAlertCircleFilled, + IconCode, + IconWorldWww, + IconPhoto, + IconBrandGoogle, + IconBrowserCheck } from '@tabler/icons-react' import StopCircleIcon from '@mui/icons-material/StopCircle' import CancelIcon from '@mui/icons-material/Cancel' @@ -51,11 +57,13 @@ const StyledNodeToolbar = styled(NodeToolbar)(({ theme }) => ({ const AgentFlowNode = ({ data }) => { const theme = useTheme() const customization = useSelector((state) => state.customization) + const canvas = useSelector((state) => state.canvas) const ref = useRef(null) const updateNodeInternals = useUpdateNodeInternals() // eslint-disable-next-line const [position, setPosition] = useState(0) const [isHovered, setIsHovered] = useState(false) + const [warningMessage, setWarningMessage] = useState('') const { deleteNode, duplicateNode } = useContext(flowContext) const [showInfoDialog, setShowInfoDialog] = useState(false) const [infoDialogProps, setInfoDialogProps] = useState({}) @@ -123,6 +131,41 @@ const AgentFlowNode = ({ data }) => { return } + const getBuiltInOpenAIToolIcon = (toolName) => { + switch (toolName) { + case 'web_search_preview': + return + case 'code_interpreter': + return + case 
'image_generation': + return + default: + return null + } + } + + const getBuiltInGeminiToolIcon = (toolName) => { + switch (toolName) { + case 'urlContext': + return + case 'googleSearch': + return + default: + return null + } + } + + const getBuiltInAnthropicToolIcon = (toolName) => { + switch (toolName) { + case 'web_search_20250305': + return + case 'web_fetch_20250910': + return + default: + return null + } + } + useEffect(() => { if (ref.current) { setTimeout(() => { @@ -132,6 +175,30 @@ const AgentFlowNode = ({ data }) => { } }, [data, ref, updateNodeInternals]) + useEffect(() => { + const nodeOutdatedMessage = (oldVersion, newVersion) => + `Node version ${oldVersion} outdated\nUpdate to latest version ${newVersion}` + const nodeVersionEmptyMessage = (newVersion) => `Node outdated\nUpdate to latest version ${newVersion}` + + const componentNode = canvas.componentNodes.find((nd) => nd.name === data.name) + if (componentNode) { + if (!data.version) { + setWarningMessage(nodeVersionEmptyMessage(componentNode.version)) + } else if (data.version && componentNode.version > data.version) { + setWarningMessage(nodeOutdatedMessage(data.version, componentNode.version)) + } else if (componentNode.badge === 'DEPRECATING') { + setWarningMessage( + componentNode?.deprecateMessage ?? + 'This node will be deprecated in the next release. Change to a new node tagged with NEW' + ) + } else if (componentNode.warning) { + setWarningMessage(componentNode.warning) + } else { + setWarningMessage('') + } + } + }, [canvas.componentNodes, data.name, data.version]) + return (
    setIsHovered(true)} onMouseLeave={() => setIsHovered(false)}> @@ -236,6 +303,24 @@ const AgentFlowNode = ({ data }) => { )} + {warningMessage && ( + {warningMessage}}> + + + + + )} + {!data.hideInput && ( { { tools: data.inputs?.llmTools, toolProperty: 'llmSelectedTool' }, { tools: data.inputs?.agentTools, toolProperty: 'agentSelectedTool' }, { - tools: data.inputs?.selectedTool ? [{ selectedTool: data.inputs?.selectedTool }] : [], - toolProperty: 'selectedTool' + tools: + data.inputs?.selectedTool ?? data.inputs?.toolAgentflowSelectedTool + ? [{ selectedTool: data.inputs?.selectedTool ?? data.inputs?.toolAgentflowSelectedTool }] + : [], + toolProperty: ['selectedTool', 'toolAgentflowSelectedTool'] }, - { tools: data.inputs?.agentKnowledgeVSEmbeddings, toolProperty: ['vectorStore', 'embeddingModel'] } + { tools: data.inputs?.agentKnowledgeVSEmbeddings, toolProperty: ['vectorStore', 'embeddingModel'] }, + { + tools: data.inputs?.agentToolsBuiltInOpenAI + ? (typeof data.inputs.agentToolsBuiltInOpenAI === 'string' + ? JSON.parse(data.inputs.agentToolsBuiltInOpenAI) + : data.inputs.agentToolsBuiltInOpenAI + ).map((tool) => ({ builtInTool: tool })) + : [], + toolProperty: 'builtInTool', + isBuiltInOpenAI: true + }, + { + tools: data.inputs?.agentToolsBuiltInGemini + ? (typeof data.inputs.agentToolsBuiltInGemini === 'string' + ? JSON.parse(data.inputs.agentToolsBuiltInGemini) + : data.inputs.agentToolsBuiltInGemini + ).map((tool) => ({ builtInTool: tool })) + : [], + toolProperty: 'builtInTool', + isBuiltInGemini: true + }, + { + tools: data.inputs?.agentToolsBuiltInAnthropic + ? (typeof data.inputs.agentToolsBuiltInAnthropic === 'string' + ? 
JSON.parse(data.inputs.agentToolsBuiltInAnthropic) + : data.inputs.agentToolsBuiltInAnthropic + ).map((tool) => ({ builtInTool: tool })) + : [], + toolProperty: 'builtInTool', + isBuiltInAnthropic: true + } ] // Filter out undefined tools and render each valid collection @@ -378,49 +496,115 @@ const AgentFlowNode = ({ data }) => { return ( - {toolName} - + /> ) }) } else { const toolName = tool[config.toolProperty] if (!toolName) return [] + // Handle built-in OpenAI tools with icons + if (config.isBuiltInOpenAI) { + const icon = getBuiltInOpenAIToolIcon(toolName) + if (!icon) return [] + + return [ + + {icon} + + ] + } + + // Handle built-in Gemini tools with icons + if (config.isBuiltInGemini) { + const icon = getBuiltInGeminiToolIcon(toolName) + if (!icon) return [] + + return [ + + {icon} + + ] + } + + // Handle built-in Anthropic tools with icons + if (config.isBuiltInAnthropic) { + const icon = getBuiltInAnthropicToolIcon(toolName) + if (!icon) return [] + + return [ + + {icon} + + ] + } + return [ - {toolName} - + /> ] } })} diff --git a/packages/ui/src/views/agentflowsv2/Canvas.jsx b/packages/ui/src/views/agentflowsv2/Canvas.jsx index d0b3c6ec7..07bf57df5 100644 --- a/packages/ui/src/views/agentflowsv2/Canvas.jsx +++ b/packages/ui/src/views/agentflowsv2/Canvas.jsx @@ -42,7 +42,7 @@ import useApi from '@/hooks/useApi' import useConfirm from '@/hooks/useConfirm' // icons -import { IconX, IconRefreshAlert } from '@tabler/icons-react' +import { IconX, IconRefreshAlert, IconMagnetFilled, IconMagnetOff, IconArtboard, IconArtboardOff } from '@tabler/icons-react' // utils import { @@ -100,6 +100,8 @@ const AgentflowCanvas = () => { const [isSyncNodesButtonEnabled, setIsSyncNodesButtonEnabled] = useState(false) const [editNodeDialogOpen, setEditNodeDialogOpen] = useState(false) const [editNodeDialogProps, setEditNodeDialogProps] = useState({}) + const [isSnappingEnabled, setIsSnappingEnabled] = useState(false) + const [isBackgroundEnabled, setIsBackgroundEnabled] = 
useState(true) const reactFlowWrapper = useRef(null) @@ -718,17 +720,40 @@ const AgentflowCanvas = () => { fitView deleteKeyCode={canvas.canvasDialogShow ? null : ['Delete']} minZoom={0.5} + snapGrid={[25, 25]} + snapToGrid={isSnappingEnabled} connectionLineComponent={ConnectionLine} > + > + + + { backgroundColor: customization.isDarkMode ? theme.palette.background.default : '#fff' }} /> - + {isBackgroundEnabled && } { setExpanded(isExpanded) } @@ -61,6 +68,18 @@ export const ConfigInput = ({ data, inputParam, disabled = false, arrayIndex = n setSelectedComponentNodeData(nodeData) } + // Memoize current input values for reliable comparison + const currentInputValues = useMemo( + () => ({ + mainValue: data.inputs[inputParam.name], + configValue: data.inputs[`${inputParam.name}Config`], + arrayValue: parentParamForArray ? data.inputs[parentParamForArray.name] : null + }), + + // eslint-disable-next-line react-hooks/exhaustive-deps + [data.inputs, inputParam.name, parentParamForArray?.name] + ) + // Load initial component data when the component mounts useEffect(() => { const loadComponentData = async () => { @@ -133,6 +152,13 @@ export const ConfigInput = ({ data, inputParam, disabled = false, arrayIndex = n componentNodeData.credential = credential ? credential : undefined setSelectedComponentNodeData(componentNodeData) + + // Store the processed inputs to track changes + setLastProcessedInputs({ + mainValue: data.inputs[inputParam.name], + configValue: data.inputs[`${inputParam.name}Config`], + arrayValue: parentParamForArray ? 
data.inputs[parentParamForArray.name] : null + }) } loadComponentData() @@ -140,6 +166,87 @@ export const ConfigInput = ({ data, inputParam, disabled = false, arrayIndex = n // eslint-disable-next-line react-hooks/exhaustive-deps }, []) + // Handle external changes to data.inputs + useEffect(() => { + if (!selectedComponentNodeData.inputParams) return + + // Check if relevant inputs have changed using strict equality comparison + const hasMainValueChanged = lastProcessedInputs.mainValue !== currentInputValues.mainValue + const hasConfigValueChanged = lastProcessedInputs.configValue !== currentInputValues.configValue + const hasArrayValueChanged = lastProcessedInputs.arrayValue !== currentInputValues.arrayValue + + if (!hasMainValueChanged && !hasConfigValueChanged && !hasArrayValueChanged) { + return // No relevant changes + } + + // Update selectedComponentNodeData with new input values + const updateComponentData = () => { + const updatedComponentData = cloneDeep(selectedComponentNodeData) + + // Helper functions (same as in initial load) + const hasArrayConfig = () => { + return ( + parentParamForArray && + data.inputs[parentParamForArray.name] && + Array.isArray(data.inputs[parentParamForArray.name]) && + data.inputs[parentParamForArray.name][arrayIndex] && + data.inputs[parentParamForArray.name][arrayIndex][`${inputParam.name}Config`] + ) + } + + const getCurrentInputValue = () => { + return hasArrayConfig() ? data.inputs[parentParamForArray.name][arrayIndex][inputParam.name] : data.inputs[inputParam.name] + } + + const getConfigData = () => { + return hasArrayConfig() + ? 
data.inputs[parentParamForArray.name][arrayIndex][`${inputParam.name}Config`] + : data.inputs[`${inputParam.name}Config`] + } + + // Update the main input value in component data + const currentValue = getCurrentInputValue() + if (currentValue !== undefined) { + updatedComponentData.inputs[inputParam.name] = currentValue + } + + // If there's config data and it matches the current value, use it + if (hasArrayConfig() || data.inputs[`${inputParam.name}Config`]) { + const configData = getConfigData() + if (configData && configData[inputParam.name] === currentValue) { + // Config is still valid, merge it with current value + updatedComponentData.inputs = { ...configData, [inputParam.name]: currentValue } + } else if (hasMainValueChanged) { + // Main value changed but config doesn't match, reset to defaults with new value + const defaultInput = initializeDefaultNodeData(updatedComponentData.inputParams) + updatedComponentData.inputs = { ...defaultInput, [inputParam.name]: currentValue } + } + } + + // Update input parameters visibility + updatedComponentData.inputParams = showHideInputParams({ + ...updatedComponentData, + inputs: updatedComponentData.inputs + }) + + const credential = updatedComponentData.inputs.credential || updatedComponentData.inputs[FLOWISE_CREDENTIAL_ID] + updatedComponentData.credential = credential ? 
credential : undefined + + setSelectedComponentNodeData(updatedComponentData) + + // Update the tracked values + setLastProcessedInputs({ + mainValue: currentInputValues.mainValue, + configValue: currentInputValues.configValue, + arrayValue: currentInputValues.arrayValue + }) + } + + updateComponentData() + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [currentInputValues, selectedComponentNodeData.inputParams, inputParam.name, parentParamForArray?.name, arrayIndex]) + // Update node configuration when selected component data changes useEffect(() => { if (!selectedComponentNodeData.inputs) return @@ -169,7 +276,7 @@ export const ConfigInput = ({ data, inputParam, disabled = false, arrayIndex = n ) // eslint-disable-next-line react-hooks/exhaustive-deps - }, [data.inputs, arrayIndex, parentParamForArray, selectedComponentNodeData]) + }, [selectedComponentNodeData]) return ( <> @@ -185,8 +292,21 @@ export const ConfigInput = ({ data, inputParam, disabled = false, arrayIndex = n > } sx={{ background: 'transparent' }}> - - {selectedComponentNodeData?.label} Parameters +
    + + {selectedComponentNodeData?.label} Parameters +
    + {selectedComponentNodeData?.warning && ( + {selectedComponentNodeData.warning}} + placement='top' + > + + + + + )} +
    {(selectedComponentNodeData.inputParams ?? []) diff --git a/packages/ui/src/views/agentflowsv2/MarketplaceCanvas.jsx b/packages/ui/src/views/agentflowsv2/MarketplaceCanvas.jsx index 3e4ef254e..aa62363c4 100644 --- a/packages/ui/src/views/agentflowsv2/MarketplaceCanvas.jsx +++ b/packages/ui/src/views/agentflowsv2/MarketplaceCanvas.jsx @@ -4,6 +4,7 @@ import 'reactflow/dist/style.css' import '@/views/canvas/index.css' import { useLocation, useNavigate } from 'react-router-dom' +import { useSelector } from 'react-redux' // material-ui import { Toolbar, Box, AppBar } from '@mui/material' @@ -18,6 +19,9 @@ import StickyNote from './StickyNote' import EditNodeDialog from '@/views/agentflowsv2/EditNodeDialog' import { flowContext } from '@/store/context/ReactFlowContext' +// icons +import { IconMagnetFilled, IconMagnetOff, IconArtboard, IconArtboardOff } from '@tabler/icons-react' + const nodeTypes = { agentFlow: AgentFlowNode, stickyNote: StickyNote, iteration: IterationNode } const edgeTypes = { agentFlow: AgentFlowEdge } @@ -26,6 +30,7 @@ const edgeTypes = { agentFlow: AgentFlowEdge } const MarketplaceCanvasV2 = () => { const theme = useTheme() const navigate = useNavigate() + const customization = useSelector((state) => state.customization) const { state } = useLocation() const { flowData, name } = state @@ -36,6 +41,8 @@ const MarketplaceCanvasV2 = () => { const [edges, setEdges, onEdgesChange] = useEdgesState() const [editNodeDialogOpen, setEditNodeDialogOpen] = useState(false) const [editNodeDialogProps, setEditNodeDialogProps] = useState({}) + const [isSnappingEnabled, setIsSnappingEnabled] = useState(false) + const [isBackgroundEnabled, setIsBackgroundEnabled] = useState(true) const reactFlowWrapper = useRef(null) const { setReactFlowInstance } = useContext(flowContext) @@ -108,16 +115,40 @@ const MarketplaceCanvasV2 = () => { edgeTypes={edgeTypes} fitView minZoom={0.1} + snapGrid={[25, 25]} + snapToGrid={isSnappingEnabled} > - + > + + + + 
{isBackgroundEnabled && } {moment(props.apiKey.createdAt).format('MMMM Do, YYYY')} - - - - - - - - - - + + + + + + + + + + + + + + {open && ( @@ -199,12 +206,12 @@ const APIKey = () => { const dispatch = useDispatch() useNotifier() + const { error, setError } = useError() const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) const [isLoading, setLoading] = useState(true) - const [error, setError] = useState(null) const [showDialog, setShowDialog] = useState(false) const [dialogProps, setDialogProps] = useState({}) const [apiKeys, setAPIKeys] = useState([]) @@ -216,6 +223,26 @@ const APIKey = () => { const [uploadDialogProps, setUploadDialogProps] = useState({}) const [search, setSearch] = useState('') + + /* Table Pagination */ + const [currentPage, setCurrentPage] = useState(1) + const [pageLimit, setPageLimit] = useState(DEFAULT_ITEMS_PER_PAGE) + const [total, setTotal] = useState(0) + + const onChange = (page, pageLimit) => { + setCurrentPage(page) + setPageLimit(pageLimit) + refresh(page, pageLimit) + } + + const refresh = (page, limit) => { + const params = { + page: page || currentPage, + limit: limit || pageLimit + } + getAllAPIKeysApi.request(params) + } + const onSearchChange = (event) => { setSearch(event.target.value) } @@ -335,12 +362,11 @@ const APIKey = () => { const onConfirm = () => { setShowDialog(false) setShowUploadDialog(false) - getAllAPIKeysApi.request() + refresh(currentPage, pageLimit) } useEffect(() => { - getAllAPIKeysApi.request() - + refresh(currentPage, pageLimit) // eslint-disable-next-line react-hooks/exhaustive-deps }, []) @@ -350,16 +376,11 @@ const APIKey = () => { useEffect(() => { if (getAllAPIKeysApi.data) { - setAPIKeys(getAllAPIKeysApi.data) + setAPIKeys(getAllAPIKeysApi.data?.data) + setTotal(getAllAPIKeysApi.data?.total) } }, [getAllAPIKeysApi.data]) - useEffect(() => { - if (getAllAPIKeysApi.error) { - 
setError(getAllAPIKeysApi.error) - } - }, [getAllAPIKeysApi.error]) - return ( <> @@ -374,7 +395,8 @@ const APIKey = () => { title='API Keys' description='Flowise API & SDK authentication keys' > - - + { id='btn_createApiKey' > Create Key - + - {!isLoading && apiKeys.length <= 0 ? ( + {!isLoading && apiKeys?.length <= 0 ? ( {
    No API Keys Yet
    ) : ( - - - - - Key Name - API Key - Usage - Created - - - - - - {isLoading ? ( - <> - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ) : ( - <> - {apiKeys.filter(filterKeys).map((key, index) => ( - { - navigator.clipboard.writeText(key.apiKey) - setAnchorEl(event.currentTarget) - setTimeout(() => { - handleClosePopOver() - }, 1500) - }} - onShowAPIClick={() => onShowApiKeyClick(key.apiKey)} - open={openPopOver} - anchorEl={anchorEl} - onClose={handleClosePopOver} - theme={theme} - onEditClick={() => edit(key)} - onDeleteClick={() => deleteKey(key)} - /> - ))} - - )} - -
    -
    + <> + + + + + Key Name + API Key + Usage + Updated + + + + + + + + + + {isLoading ? ( + <> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ) : ( + <> + {apiKeys?.filter(filterKeys).map((key, index) => ( + { + navigator.clipboard.writeText(key.apiKey) + setAnchorEl(event.currentTarget) + setTimeout(() => { + handleClosePopOver() + }, 1500) + }} + onShowAPIClick={() => onShowApiKeyClick(key.apiKey)} + open={openPopOver} + anchorEl={anchorEl} + onClose={handleClosePopOver} + theme={theme} + onEditClick={() => edit(key)} + onDeleteClick={() => deleteKey(key)} + /> + ))} + + )} + +
    +
    + {/* Pagination and Page Size Controls */} + + )} )} diff --git a/packages/ui/src/views/assistants/custom/CustomAssistantConfigurePreview.jsx b/packages/ui/src/views/assistants/custom/CustomAssistantConfigurePreview.jsx index b9b4326ac..057241529 100644 --- a/packages/ui/src/views/assistants/custom/CustomAssistantConfigurePreview.jsx +++ b/packages/ui/src/views/assistants/custom/CustomAssistantConfigurePreview.jsx @@ -39,7 +39,9 @@ import ViewLeadsDialog from '@/ui-component/dialog/ViewLeadsDialog' import Settings from '@/views/settings' import ConfirmDialog from '@/ui-component/dialog/ConfirmDialog' import PromptGeneratorDialog from '@/ui-component/dialog/PromptGeneratorDialog' +import { Available } from '@/ui-component/rbac/available' import ExpandTextDialog from '@/ui-component/dialog/ExpandTextDialog' +import { SwitchInput } from '@/ui-component/switch/Switch' // API import assistantsApi from '@/api/assistants' @@ -52,7 +54,7 @@ import { baseURL } from '@/store/constant' import { SET_CHATFLOW, closeSnackbar as closeSnackbarAction, enqueueSnackbar as enqueueSnackbarAction } from '@/store/actions' // Utils -import { initNode } from '@/utils/genericHelper' +import { initNode, showHideInputParams } from '@/utils/genericHelper' import useNotifier from '@/utils/useNotifier' import { toolAgentFlow } from './toolAgentFlow' @@ -126,6 +128,28 @@ const CustomAssistantConfigurePreview = () => { const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) + const handleChatModelDataChange = ({ inputParam, newValue }) => { + setSelectedChatModel((prevData) => { + const updatedData = { ...prevData } + updatedData.inputs[inputParam.name] = newValue + updatedData.inputParams = showHideInputParams(updatedData) + return updatedData + }) + } + + const handleToolDataChange = + (toolIndex) => + ({ inputParam, newValue }) => { + setSelectedTools((prevTools) => { + const updatedTools = 
[...prevTools] + const updatedTool = { ...updatedTools[toolIndex] } + updatedTool.inputs[inputParam.name] = newValue + updatedTool.inputParams = showHideInputParams(updatedTool) + updatedTools[toolIndex] = updatedTool + return updatedTools + }) + } + const displayWarning = () => { enqueueSnackbar({ message: 'Please fill in all mandatory fields.', @@ -143,9 +167,10 @@ const CustomAssistantConfigurePreview = () => { const checkInputParamsMandatory = () => { let canSubmit = true - - const inputParams = (selectedChatModel.inputParams ?? []).filter((inputParam) => !inputParam.hidden) - for (const inputParam of inputParams) { + const visibleInputParams = showHideInputParams(selectedChatModel).filter( + (inputParam) => !inputParam.hidden && inputParam.display !== false + ) + for (const inputParam of visibleInputParams) { if (!inputParam.optional && (!selectedChatModel.inputs[inputParam.name] || !selectedChatModel.credential)) { if (inputParam.type === 'credential' && !selectedChatModel.credential) { canSubmit = false @@ -160,8 +185,10 @@ const CustomAssistantConfigurePreview = () => { if (selectedTools.length > 0) { for (let i = 0; i < selectedTools.length; i++) { const tool = selectedTools[i] - const inputParams = (tool.inputParams ?? 
[]).filter((inputParam) => !inputParam.hidden) - for (const inputParam of inputParams) { + const visibleInputParams = showHideInputParams(tool).filter( + (inputParam) => !inputParam.hidden && inputParam.display !== false + ) + for (const inputParam of visibleInputParams) { if (!inputParam.optional && (!tool.inputs[inputParam.name] || !tool.credential)) { if (inputParam.type === 'credential' && !tool.credential) { canSubmit = false @@ -328,6 +355,7 @@ const CustomAssistantConfigurePreview = () => { const retrieverToolNodeData = cloneDeep(initNode(retrieverToolNode.data, retrieverToolId)) set(docStoreVSNodeData, 'inputs.selectedStore', selectedDocumentStores[i].id) + set(docStoreVSNodeData, 'outputs.output', 'retriever') const docStoreOption = documentStoreOptions.find((ds) => ds.name === selectedDocumentStores[i].id) // convert to small case and replace space with underscore @@ -341,7 +369,7 @@ const CustomAssistantConfigurePreview = () => { name, description: desc, retriever: `{{${docStoreVSId}.data.instance}}`, - returnSourceDocuments: true + returnSourceDocuments: selectedDocumentStores[i].returnSourceDocuments ?? false }) const docStoreVS = { @@ -485,7 +513,8 @@ const CustomAssistantConfigurePreview = () => { } else if (setting === 'viewMessages') { setViewMessagesDialogProps({ title: 'View Messages', - chatflow: canvas.chatflow + chatflow: canvas.chatflow, + isChatflow: false }) setViewMessagesDialogOpen(true) } else if (setting === 'viewLeads') { @@ -648,7 +677,8 @@ const CustomAssistantConfigurePreview = () => { const newDocStore = { id: docStoreId, name: foundDocumentStoreOption?.label || '', - description: foundSelectedDocumentStore?.description || foundDocumentStoreOption?.description || '' + description: foundSelectedDocumentStore?.description || foundDocumentStoreOption?.description || '', + returnSourceDocuments: foundSelectedDocumentStore?.returnSourceDocuments ?? 
false } newSelectedDocumentStores.push(newDocStore) @@ -866,26 +896,28 @@ const CustomAssistantConfigurePreview = () => { )} - - - - - + + + + + + + {customAssistantFlowId && !loadingAssistant && ( { )} {!customAssistantFlowId && !loadingAssistant && ( - - - - - + + + + + + + )}
    @@ -1106,6 +1140,18 @@ const CustomAssistantConfigurePreview = () => { setSelectedDocumentStores(newSelectedDocumentStores) }} /> + + Return Source Documents + + + { + const newSelectedDocumentStores = [...selectedDocumentStores] + newSelectedDocumentStores[index].returnSourceDocuments = newValue + setSelectedDocumentStores(newSelectedDocumentStores) + }} + /> ) })} @@ -1121,13 +1167,14 @@ const CustomAssistantConfigurePreview = () => { borderRadius: 2 }} > - {(selectedChatModel.inputParams ?? []) - .filter((inputParam) => !inputParam.hidden) + {showHideInputParams(selectedChatModel) + .filter((inputParam) => !inputParam.hidden && inputParam.display !== false) .map((inputParam, index) => ( ))} @@ -1212,13 +1259,16 @@ const CustomAssistantConfigurePreview = () => { mb: 1 }} > - {(tool.inputParams ?? []) - .filter((inputParam) => !inputParam.hidden) - .map((inputParam, index) => ( + {showHideInputParams(tool) + .filter( + (inputParam) => !inputParam.hidden && inputParam.display !== false + ) + .map((inputParam, inputIndex) => ( ))} @@ -1237,20 +1287,22 @@ const CustomAssistantConfigurePreview = () => { {selectedChatModel && Object.keys(selectedChatModel).length > 0 && ( - + + + )}
    diff --git a/packages/ui/src/views/assistants/custom/CustomAssistantLayout.jsx b/packages/ui/src/views/assistants/custom/CustomAssistantLayout.jsx index 2e21d6edf..6597bfeb6 100644 --- a/packages/ui/src/views/assistants/custom/CustomAssistantLayout.jsx +++ b/packages/ui/src/views/assistants/custom/CustomAssistantLayout.jsx @@ -10,9 +10,9 @@ import MainCard from '@/ui-component/cards/MainCard' import ItemCard from '@/ui-component/cards/ItemCard' import { baseURL, gridSpacing } from '@/store/constant' import AssistantEmptySVG from '@/assets/images/assistant_empty.svg' -import { StyledButton } from '@/ui-component/button/StyledButton' import AddCustomAssistantDialog from './AddCustomAssistantDialog' import ErrorBoundary from '@/ErrorBoundary' +import { StyledPermissionButton } from '@/ui-component/button/RBACButtons' // API import assistantsApi from '@/api/assistants' @@ -64,7 +64,9 @@ const CustomAssistantLayout = () => { const getImages = (details) => { const images = [] if (details && details.chatModel && details.chatModel.name) { - images.push(`${baseURL}/api/v1/node-icon/${details.chatModel.name}`) + images.push({ + imageSrc: `${baseURL}/api/v1/node-icon/${details.chatModel.name}` + }) } return images } @@ -101,14 +103,15 @@ const CustomAssistantLayout = () => { description='Create custom assistants with your choice of LLMs' onBack={() => navigate(-1)} > - } > Add - + {isLoading ? 
( diff --git a/packages/ui/src/views/assistants/openai/AssistantDialog.jsx b/packages/ui/src/views/assistants/openai/AssistantDialog.jsx index cd6722ef9..7b2b3041c 100644 --- a/packages/ui/src/views/assistants/openai/AssistantDialog.jsx +++ b/packages/ui/src/views/assistants/openai/AssistantDialog.jsx @@ -21,7 +21,6 @@ import { OutlinedInput } from '@mui/material' -import { StyledButton } from '@/ui-component/button/StyledButton' import { TooltipWithParser } from '@/ui-component/tooltip/TooltipWithParser' import { Dropdown } from '@/ui-component/dropdown/Dropdown' import { MultiDropdown } from '@/ui-component/dropdown/MultiDropdown' @@ -30,6 +29,7 @@ import { File } from '@/ui-component/file/File' import { BackdropLoader } from '@/ui-component/loading/BackdropLoader' import DeleteConfirmDialog from './DeleteConfirmDialog' import AssistantVectorStoreDialog from './AssistantVectorStoreDialog' +import { StyledPermissionButton } from '@/ui-component/button/RBACButtons' // Icons import { IconX, IconPlus } from '@tabler/icons-react' @@ -205,6 +205,7 @@ const AssistantDialog = ({ show, dialogProps, onCancel, onConfirm, setError }) = useEffect(() => { if (getSpecificAssistantApi.error) { + const error = getSpecificAssistantApi.error let errMsg = '' if (error?.response?.data) { errMsg = typeof error.response.data === 'object' ? error.response.data.message : error.response.data @@ -1035,22 +1036,33 @@ const AssistantDialog = ({ show, dialogProps, onCancel, onConfirm, setError }) = {dialogProps.type === 'EDIT' && ( - onSyncClick()}> + onSyncClick()} + > Sync - + )} {dialogProps.type === 'EDIT' && ( - onDeleteClick()}> + onDeleteClick()} + > Delete - + )} - (dialogProps.type === 'ADD' ? addNewAssistant() : saveAssistant())} > {dialogProps.confirmButtonName} - + { description='Create assistants using OpenAI Assistant API' onBack={() => navigate(-1)} > - - + } > Add - + {isLoading ? 
( diff --git a/packages/ui/src/views/auth/expired.jsx b/packages/ui/src/views/auth/expired.jsx new file mode 100644 index 000000000..6f893d5d6 --- /dev/null +++ b/packages/ui/src/views/auth/expired.jsx @@ -0,0 +1,46 @@ +import MainCard from '@/ui-component/cards/MainCard' +import { Box, Stack, Typography } from '@mui/material' +import contactSupport from '@/assets/images/contact_support.svg' +import { StyledButton } from '@/ui-component/button/StyledButton' + +// ==============================|| License Expired Page ||============================== // + +const LicenseExpired = () => { + return ( + <> + + + + + contact support + + + Your enterprise license has expired + + + Please contact our support team to renew your license. + + + Contact Support + + + + + + ) +} + +export default LicenseExpired diff --git a/packages/ui/src/views/auth/forgotPassword.jsx b/packages/ui/src/views/auth/forgotPassword.jsx new file mode 100644 index 000000000..9e17f2436 --- /dev/null +++ b/packages/ui/src/views/auth/forgotPassword.jsx @@ -0,0 +1,146 @@ +import { useEffect, useState } from 'react' +import { Link } from 'react-router-dom' + +// material-ui +import { Alert, Box, Stack, Typography, useTheme } from '@mui/material' + +// project imports +import { StyledButton } from '@/ui-component/button/StyledButton' +import MainCard from '@/ui-component/cards/MainCard' +import { Input } from '@/ui-component/input/Input' +import { BackdropLoader } from '@/ui-component/loading/BackdropLoader' + +// API +import accountApi from '@/api/account.api' + +// Hooks +import useApi from '@/hooks/useApi' +import { useConfig } from '@/store/context/ConfigContext' + +// utils +import useNotifier from '@/utils/useNotifier' + +// Icons +import { IconCircleCheck, IconExclamationCircle } from '@tabler/icons-react' + +// ==============================|| ForgotPasswordPage ||============================== // + +const ForgotPasswordPage = () => { + const theme = useTheme() + useNotifier() + + const 
usernameInput = { + label: 'Username', + name: 'username', + type: 'email', + placeholder: 'user@company.com' + } + const [usernameVal, setUsernameVal] = useState('') + const { isEnterpriseLicensed } = useConfig() + + const [isLoading, setLoading] = useState(false) + const [responseMsg, setResponseMsg] = useState(undefined) + + const forgotPasswordApi = useApi(accountApi.forgotPassword) + + const sendResetRequest = async (event) => { + event.preventDefault() + const body = { + user: { + email: usernameVal + } + } + setLoading(true) + await forgotPasswordApi.request(body) + } + + useEffect(() => { + if (forgotPasswordApi.error) { + const errMessage = + typeof forgotPasswordApi.error.response.data === 'object' + ? forgotPasswordApi.error.response.data.message + : forgotPasswordApi.error.response.data + setResponseMsg({ + type: 'error', + msg: errMessage ?? 'Failed to send instructions, please contact your administrator.' + }) + setLoading(false) + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [forgotPasswordApi.error]) + + useEffect(() => { + if (forgotPasswordApi.data) { + setResponseMsg({ + type: 'success', + msg: 'Password reset instructions sent to the email.' + }) + setLoading(false) + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [forgotPasswordApi.data]) + + return ( + <> + + + {responseMsg && responseMsg?.type === 'error' && ( + } variant='filled' severity='error'> + {responseMsg.msg} + + )} + {responseMsg && responseMsg?.type !== 'error' && ( + } variant='filled' severity='success'> + {responseMsg.msg} + + )} + + Forgot Password? + + Have a reset password code?{' '} + + Change your password here + + . + + +
    + + +
    + + Email * + + +
    +
    + setUsernameVal(newValue)} + value={usernameVal} + showDialog={false} + /> + {isEnterpriseLicensed && ( + + If you forgot the email you used for signing up, please contact your administrator. + + )} +
    + + Send Reset Password Instructions + +
    +
    + +
    +
    + + ) +} + +export default ForgotPasswordPage diff --git a/packages/ui/src/views/auth/login.jsx b/packages/ui/src/views/auth/login.jsx new file mode 100644 index 000000000..54eb7a833 --- /dev/null +++ b/packages/ui/src/views/auth/login.jsx @@ -0,0 +1,45 @@ +import { useEffect, useState } from 'react' + +// material-ui +import { BackdropLoader } from '@/ui-component/loading/BackdropLoader' + +// project imports +import MainCard from '@/ui-component/cards/MainCard' + +// API +import authApi from '@/api/auth' + +// Hooks +import useApi from '@/hooks/useApi' + +// ==============================|| ResolveLoginPage ||============================== // + +const ResolveLoginPage = () => { + const resolveLogin = useApi(authApi.resolveLogin) + const [loading, setLoading] = useState(false) + + useEffect(() => { + setLoading(false) + }, [resolveLogin.error]) + + useEffect(() => { + resolveLogin.request({}) + setLoading(true) + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []) + + useEffect(() => { + setLoading(false) + if (resolveLogin.data) { + window.location.href = resolveLogin.data.redirectUrl + } + }, [resolveLogin.data]) + + return ( + <> + {loading && } + + ) +} + +export default ResolveLoginPage diff --git a/packages/ui/src/views/auth/loginActivity.jsx b/packages/ui/src/views/auth/loginActivity.jsx new file mode 100644 index 000000000..2e01078e8 --- /dev/null +++ b/packages/ui/src/views/auth/loginActivity.jsx @@ -0,0 +1,638 @@ +import { forwardRef, useEffect, useState } from 'react' +import { useDispatch, useSelector } from 'react-redux' +import moment from 'moment/moment' +import PropTypes from 'prop-types' + +// material-ui +import { + Box, + Skeleton, + Stack, + Table, + TableBody, + TableContainer, + TableHead, + TableRow, + Paper, + IconButton, + useTheme, + Checkbox, + Button, + OutlinedInput, + MenuItem, + Select, + InputLabel, + FormControl, + ListItemText, + ListItemButton +} from '@mui/material' + +// project imports +import MainCard from 
'@/ui-component/cards/MainCard' +import ConfirmDialog from '@/ui-component/dialog/ConfirmDialog' +import ViewHeader from '@/layout/MainLayout/ViewHeader' +import ErrorBoundary from '@/ErrorBoundary' +import { StyledTableCell, StyledTableRow } from '@/ui-component/table/TableStyles' +import DatePicker from 'react-datepicker' +import 'react-datepicker/dist/react-datepicker.css' + +// API +import auditApi from '@/api/audit' + +// Hooks +import useApi from '@/hooks/useApi' +import useConfirm from '@/hooks/useConfirm' + +// utils +import useNotifier from '@/utils/useNotifier' + +// Icons +import { IconCircleX, IconChevronLeft, IconChevronRight, IconTrash, IconX, IconLogin, IconLogout } from '@tabler/icons-react' + +// store +import { useError } from '@/store/context/ErrorContext' +import { enqueueSnackbar as enqueueSnackbarAction, closeSnackbar as closeSnackbarAction } from '@/store/actions' +import { PermissionButton } from '@/ui-component/button/RBACButtons' + +const activityTypes = [ + 'Login Success', + 'Logout Success', + 'Unknown User', + 'Incorrect Credential', + 'User Disabled', + 'No Assigned Workspace', + 'Unknown Activity' +] +const MenuProps = { + PaperProps: { + style: { + width: 160 + } + } +} +const SelectStyles = { + '& .MuiOutlinedInput-notchedOutline': { + borderRadius: 2 + } +} + +// ==============================|| Login Activity ||============================== // + +const DatePickerCustomInput = forwardRef(function DatePickerCustomInput({ value, onClick }, ref) { + return ( + + {value} + + ) +}) + +DatePickerCustomInput.propTypes = { + value: PropTypes.string, + onClick: PropTypes.func +} +const LoginActivity = () => { + const theme = useTheme() + const customization = useSelector((state) => state.customization) + const dispatch = useDispatch() + useNotifier() + const { error, setError } = useError() + + const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) + const closeSnackbar = (...args) => 
dispatch(closeSnackbarAction(...args)) + + const [isLoading, setLoading] = useState(true) + + const { confirm } = useConfirm() + + const getLoginActivityApi = useApi(auditApi.fetchLoginActivity) + const [activity, setActivity] = useState([]) + const [typeFilter, setTypeFilter] = useState([]) + const [totalRecords, setTotalRecords] = useState(0) + const [currentPage, setCurrentPage] = useState(1) + const [start, setStart] = useState(1) + const [end, setEnd] = useState(50) + const [startDate, setStartDate] = useState(new Date(new Date().setMonth(new Date().getMonth() - 1))) + const [endDate, setEndDate] = useState(new Date()) + + const [selected, setSelected] = useState([]) + + const onStartDateSelected = (date) => { + setStartDate(date) + refreshData(currentPage, date, endDate, typeFilter) + } + + const onEndDateSelected = (date) => { + setEndDate(date) + refreshData(currentPage, startDate, date, typeFilter) + } + + const onSelectAllClick = (event) => { + if (event.target.checked) { + const newSelected = activity.map((n) => n.id) + setSelected(newSelected) + return + } + setSelected([]) + } + + const handleSelect = (event, id) => { + const selectedIndex = selected.indexOf(id) + let newSelected = [] + + if (selectedIndex === -1) { + newSelected = newSelected.concat(selected, id) + } else if (selectedIndex === 0) { + newSelected = newSelected.concat(selected.slice(1)) + } else if (selectedIndex === selected.length - 1) { + newSelected = newSelected.concat(selected.slice(0, -1)) + } else if (selectedIndex > 0) { + newSelected = newSelected.concat(selected.slice(0, selectedIndex), selected.slice(selectedIndex + 1)) + } + setSelected(newSelected) + } + + const refreshData = (_page, _start, _end, _filter) => { + const activityCodes = [] + if (_filter.length > 0) { + _filter.forEach((type) => { + activityCodes.push(getActivityCode(type)) + }) + } + getLoginActivityApi.request({ + pageNo: _page, + startDate: _start, + endDate: _end, + activityCodes: activityCodes + }) + } + 
+ const changePage = (newPage) => { + setLoading(true) + setCurrentPage(newPage) + refreshData(newPage, startDate, endDate, typeFilter) + } + + const handleTypeFilterChange = (event) => { + const { + target: { value } + } = event + let newVar = typeof value === 'string' ? value.split(',') : value + setTypeFilter(newVar) + refreshData(currentPage, startDate, endDate, newVar) + } + + function getActivityDescription(activityCode) { + switch (activityCode) { + case 0: + return 'Login Success' + case 1: + return 'Logout Success' + case -1: + return 'Unknown User' + case -2: + return 'Incorrect Credential' + case -3: + return 'User Disabled' + case -4: + return 'No Assigned Workspace' + default: + return 'Unknown Activity' + } + } + + function getActivityCode(activityDescription) { + switch (activityDescription) { + case 'Login Success': + return 0 + case 'Logout Success': + return 1 + case 'Unknown User': + return -1 + case 'Incorrect Credential': + return -2 + case 'User Disabled': + return -3 + case 'No Assigned Workspace': + return -4 + default: + return -99 + } + } + + const deleteLoginActivity = async () => { + const confirmPayload = { + title: `Delete`, + description: `Delete ${selected.length} ${selected.length > 1 ? 'records' : 'record'}? `, + confirmButtonName: 'Delete', + cancelButtonName: 'Cancel' + } + const isConfirmed = await confirm(confirmPayload) + // + if (isConfirmed) { + try { + const deleteResp = await auditApi.deleteLoginActivity({ + selected: selected + }) + if (deleteResp.data) { + enqueueSnackbar({ + message: selected.length + ' Login Activity Records Deleted Successfully', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + onConfirm() + } + } catch (error) { + enqueueSnackbar({ + message: `Failed to delete records: ${ + typeof error.response.data === 'object' ? 
error.response.data.message : error.response.data + }`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + } + } + } + + const onConfirm = () => { + getLoginActivityApi.request() + } + + useEffect(() => { + getLoginActivityApi.request({ + pageNo: 1 + }) + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []) + + useEffect(() => { + setLoading(getLoginActivityApi.loading) + }, [getLoginActivityApi.loading]) + + useEffect(() => { + if (getLoginActivityApi.error) { + setError(getLoginActivityApi.error) + } + }, [getLoginActivityApi.error, setError]) + + useEffect(() => { + if (getLoginActivityApi.data) { + const data = getLoginActivityApi.data + setTotalRecords(data.count) + setLoading(false) + setCurrentPage(data.currentPage) + setStart(data.currentPage * data.pageSize - (data.pageSize - 1)) + setEnd(data.currentPage * data.pageSize > data.count ? data.count : data.currentPage * data.pageSize) + setActivity(data.data) + setSelected([]) + } + }, [getLoginActivityApi.data]) + + return ( + <> + + {error ? ( + + ) : ( + + + + +
    +
    +
    + From: + onStartDateSelected(date)} + selectsStart + startDate={startDate} + endDate={endDate} + customInput={} + /> +
    +
    + To: + onEndDateSelected(date)} + selectsEnd + startDate={startDate} + endDate={endDate} + minDate={startDate} + maxDate={new Date()} + customInput={} + /> +
    +
    + + + Filter By + + + +
    +
    +
    +
    + changePage(currentPage - 1)} + style={{ marginRight: 10 }} + variant='outlined' + disabled={currentPage === 1} + > + + + Showing {Math.min(start, totalRecords)}-{end} of {totalRecords} Records + changePage(currentPage + 1)} + style={{ marginLeft: 10 }} + variant='outlined' + disabled={end >= totalRecords} + > + = totalRecords + ? '#616161' + : 'white' + : end >= totalRecords + ? '#e0e0e0' + : 'black' + } + /> + +
    + } + > + {'Delete Selected'} + +
    +
    + + + + + + + + Activity + User + Date + Method + Message + + + + {isLoading ? ( + <> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ) : ( + <> + {activity.map((item, index) => ( + + + handleSelect(event, item.id)} + /> + + +
    +
    + {item.activityCode === 0 && ( + + )} + {item.activityCode === 1 && ( + + )} + {item.activityCode < 0 && ( + + )} +
    +
    {getActivityDescription(item.activityCode)}
    +
    +
    + {item.username} + + {moment(item.attemptedDateTime).format('MMMM Do, YYYY, HH:mm')} + + + {item.loginMode ? item.loginMode : 'Email/Password'} + + {item.message} +
    + ))} + + )} +
    +
    +
    +
    +
    +
    + )} +
    + + + ) +} + +export default LoginActivity diff --git a/packages/ui/src/views/auth/register.jsx b/packages/ui/src/views/auth/register.jsx new file mode 100644 index 000000000..30c18b12e --- /dev/null +++ b/packages/ui/src/views/auth/register.jsx @@ -0,0 +1,472 @@ +import { useEffect, useState } from 'react' +import { Link, useNavigate, useSearchParams } from 'react-router-dom' +import { z } from 'zod' + +// material-ui +import { Alert, Box, Button, Divider, Icon, List, ListItemText, OutlinedInput, Stack, Typography, useTheme } from '@mui/material' + +// project imports +import { StyledButton } from '@/ui-component/button/StyledButton' +import { Input } from '@/ui-component/input/Input' +import { BackdropLoader } from '@/ui-component/loading/BackdropLoader' + +// API +import accountApi from '@/api/account.api' +import loginMethodApi from '@/api/loginmethod' +import ssoApi from '@/api/sso' + +// Hooks +import useApi from '@/hooks/useApi' +import { useConfig } from '@/store/context/ConfigContext' + +// utils +import useNotifier from '@/utils/useNotifier' +import { passwordSchema } from '@/utils/validation' + +// Icons +import Auth0SSOLoginIcon from '@/assets/images/auth0.svg' +import GithubSSOLoginIcon from '@/assets/images/github.svg' +import GoogleSSOLoginIcon from '@/assets/images/google.svg' +import AzureSSOLoginIcon from '@/assets/images/microsoft-azure.svg' +import { store } from '@/store' +import { loginSuccess } from '@/store/reducers/authSlice' +import { IconCircleCheck, IconExclamationCircle } from '@tabler/icons-react' + +// ==============================|| Register ||============================== // + +// IMPORTANT: when updating this schema, update the schema on the server as well +// packages/server/src/enterprise/Interface.Enterprise.ts +const RegisterEnterpriseUserSchema = z + .object({ + username: z.string().min(1, 'Name is required'), + email: z.string().min(1, 'Email is required').email('Invalid email address'), + password: passwordSchema, + 
confirmPassword: z.string().min(1, 'Confirm Password is required'), + token: z.string().min(1, 'Invite Code is required') + }) + .refine((data) => data.password === data.confirmPassword, { + message: "Passwords don't match", + path: ['confirmPassword'] + }) + +const RegisterCloudUserSchema = z + .object({ + username: z.string().min(1, 'Name is required'), + email: z.string().min(1, 'Email is required').email('Invalid email address'), + password: passwordSchema, + confirmPassword: z.string().min(1, 'Confirm Password is required') + }) + .refine((data) => data.password === data.confirmPassword, { + message: "Passwords don't match", + path: ['confirmPassword'] + }) + +const RegisterPage = () => { + const theme = useTheme() + useNotifier() + const { isEnterpriseLicensed, isCloud, isOpenSource } = useConfig() + + const usernameInput = { + label: 'Username', + name: 'username', + type: 'text', + placeholder: 'John Doe' + } + + const passwordInput = { + label: 'Password', + name: 'password', + type: 'password', + placeholder: '********' + } + + const confirmPasswordInput = { + label: 'Confirm Password', + name: 'confirmPassword', + type: 'password', + placeholder: '********' + } + + const emailInput = { + label: 'EMail', + name: 'email', + type: 'email', + placeholder: 'user@company.com' + } + + const inviteCodeInput = { + label: 'Invite Code', + name: 'inviteCode', + type: 'text' + } + + const [params] = useSearchParams() + + const [email, setEmail] = useState('') + const [password, setPassword] = useState('') + const [confirmPassword, setConfirmPassword] = useState('') + const [token, setToken] = useState(params.get('token') ?? 
'') + const [username, setUsername] = useState('') + const [configuredSsoProviders, setConfiguredSsoProviders] = useState([]) + + const [loading, setLoading] = useState(false) + const [authError, setAuthError] = useState('') + const [successMsg, setSuccessMsg] = useState(undefined) + + const registerApi = useApi(accountApi.registerAccount) + const ssoLoginApi = useApi(ssoApi.ssoLogin) + const getDefaultProvidersApi = useApi(loginMethodApi.getDefaultLoginMethods) + const navigate = useNavigate() + + const register = async (event) => { + event.preventDefault() + if (isEnterpriseLicensed) { + const result = RegisterEnterpriseUserSchema.safeParse({ + username, + email, + token, + password, + confirmPassword + }) + if (result.success) { + setLoading(true) + const body = { + user: { + name: username, + email, + credential: password, + tempToken: token + } + } + await registerApi.request(body) + } else { + const errorMessages = result.error.errors.map((err) => err.message) + setAuthError(errorMessages.join(', ')) + } + } else if (isCloud) { + const formData = new FormData(event.target) + const referral = formData.get('referral') + const result = RegisterCloudUserSchema.safeParse({ + username, + email, + password, + confirmPassword + }) + if (result.success) { + setLoading(true) + const body = { + user: { + name: username, + email, + credential: password + } + } + if (referral) { + body.user.referral = referral + } + await registerApi.request(body) + } else { + const errorMessages = result.error.errors.map((err) => err.message) + setAuthError(errorMessages.join(', ')) + } + } + } + + const signInWithSSO = (ssoProvider) => { + //ssoLoginApi.request(ssoProvider) + window.location.href = `/api/v1/${ssoProvider}/login` + } + + useEffect(() => { + if (registerApi.error) { + if (isEnterpriseLicensed) { + setAuthError( + `Error in registering user. Please contact your administrator. 
(${registerApi.error?.response?.data?.message})` + ) + } else if (isCloud) { + setAuthError(`Error in registering user. Please try again.`) + } + setLoading(false) + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [registerApi.error]) + + useEffect(() => { + if (!isOpenSource) { + getDefaultProvidersApi.request() + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []) + + useEffect(() => { + if (ssoLoginApi.data) { + store.dispatch(loginSuccess(ssoLoginApi.data)) + navigate(location.state?.path || '/') + } + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [ssoLoginApi.data]) + + useEffect(() => { + if (ssoLoginApi.error) { + if (ssoLoginApi.error?.response?.status === 401 && ssoLoginApi.error?.response?.data.redirectUrl) { + window.location.href = ssoLoginApi.error.response.data.redirectUrl + } else { + setAuthError(ssoLoginApi.error.message) + } + } + }, [ssoLoginApi.error]) + + useEffect(() => { + if (getDefaultProvidersApi.data && getDefaultProvidersApi.data.providers) { + //data is an array of objects, store only the provider attribute + setConfiguredSsoProviders(getDefaultProvidersApi.data.providers.map((provider) => provider)) + } + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getDefaultProvidersApi.data]) + + useEffect(() => { + if (registerApi.data) { + setLoading(false) + setAuthError(undefined) + setConfirmPassword('') + setPassword('') + setToken('') + setUsername('') + setEmail('') + if (isEnterpriseLicensed) { + setSuccessMsg('Registration Successful. 
You will be redirected to the sign in page shortly.') + } else if (isCloud) { + setSuccessMsg('To complete your registration, please click on the verification link we sent to your email address') + } + setTimeout(() => { + navigate('/signin') + }, 3000) + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [registerApi.data]) + + return ( + <> + + + {authError && ( + } variant='filled' severity='error'> + {authError.split(', ').length > 0 ? ( + + {authError.split(', ').map((error, index) => ( + + ))} + + ) : ( + authError + )} + + )} + {successMsg && ( + } variant='filled' severity='success'> + {successMsg} + + )} + + Sign Up + + Already have an account?{' '} + + Sign In + + . + + +
    + + +
    + + Full Name * + +
    +
    + setUsername(newValue)} + value={username} + showDialog={false} + /> + + Is used for display purposes only. + +
    + +
    + + Email * + +
    +
    + setEmail(newValue)} + value={email} + showDialog={false} + /> + + Kindly use a valid email address. Will be used as login id. + +
    + {isEnterpriseLicensed && ( + +
    + + Invite Code * + +
    +
    + setToken(e.target.value)} + value={token} + /> + + Please copy the token you would have received in your email. + +
    + )} + +
    + + Password * + +
    +
    + setPassword(newValue)} value={password} /> + + + Password must be at least 8 characters long and contain at least one lowercase letter, one uppercase + letter, one digit, and one special character. + + +
    + +
    + + Confirm Password * + +
    +
    + setConfirmPassword(newValue)} + value={confirmPassword} + /> + + Confirm your password. Must match the password typed above. + +
    + + Create Account + + {configuredSsoProviders.length > 0 && OR} + {configuredSsoProviders && + configuredSsoProviders.map( + (ssoProvider) => + //https://learn.microsoft.com/en-us/entra/identity-platform/howto-add-branding-in-apps + ssoProvider === 'azure' && ( + + ) + )} + {configuredSsoProviders && + configuredSsoProviders.map( + (ssoProvider) => + ssoProvider === 'google' && ( + + ) + )} + {configuredSsoProviders && + configuredSsoProviders.map( + (ssoProvider) => + ssoProvider === 'auth0' && ( + + ) + )} + {configuredSsoProviders && + configuredSsoProviders.map( + (ssoProvider) => + ssoProvider === 'github' && ( + + ) + )} +
    +
    +
    +
    + {loading && } + + ) +} + +export default RegisterPage diff --git a/packages/ui/src/views/auth/resetPassword.jsx b/packages/ui/src/views/auth/resetPassword.jsx new file mode 100644 index 000000000..3ca33f8cd --- /dev/null +++ b/packages/ui/src/views/auth/resetPassword.jsx @@ -0,0 +1,257 @@ +import { useState } from 'react' +import { useDispatch } from 'react-redux' +import { Link, useNavigate, useSearchParams } from 'react-router-dom' + +// material-ui +import { Alert, Box, Button, OutlinedInput, Stack, Typography, useTheme } from '@mui/material' + +// project imports +import { closeSnackbar as closeSnackbarAction, enqueueSnackbar as enqueueSnackbarAction } from '@/store/actions' +import { StyledButton } from '@/ui-component/button/StyledButton' +import MainCard from '@/ui-component/cards/MainCard' +import { Input } from '@/ui-component/input/Input' +import { BackdropLoader } from '@/ui-component/loading/BackdropLoader' + +// API +import accountApi from '@/api/account.api' + +// utils +import useNotifier from '@/utils/useNotifier' +import { validatePassword } from '@/utils/validation' + +// Icons +import { IconExclamationCircle, IconX } from '@tabler/icons-react' + +// ==============================|| ResetPasswordPage ||============================== // + +const ResetPasswordPage = () => { + const theme = useTheme() + useNotifier() + const navigate = useNavigate() + const dispatch = useDispatch() + const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) + const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) + + const emailInput = { + label: 'Email', + name: 'email', + type: 'email', + placeholder: 'user@company.com' + } + + const passwordInput = { + label: 'Password', + name: 'password', + type: 'password', + placeholder: '********' + } + + const confirmPasswordInput = { + label: 'Confirm Password', + name: 'confirmPassword', + type: 'password', + placeholder: '********' + } + + const resetPasswordInput = { + label: 
'Reset Token', + name: 'resetToken', + type: 'text' + } + + const [params] = useSearchParams() + const token = params.get('token') + + const [emailVal, setEmailVal] = useState('') + const [newPasswordVal, setNewPasswordVal] = useState('') + const [confirmPasswordVal, setConfirmPasswordVal] = useState('') + const [tokenVal, setTokenVal] = useState(token ?? '') + + const [loading, setLoading] = useState(false) + const [authErrors, setAuthErrors] = useState([]) + + const goLogin = () => { + navigate('/signin', { replace: true }) + } + + const validateAndSubmit = async (event) => { + event.preventDefault() + const validationErrors = [] + setAuthErrors([]) + if (!tokenVal) { + validationErrors.push('Token cannot be left blank!') + } + if (newPasswordVal !== confirmPasswordVal) { + validationErrors.push('New Password and Confirm Password do not match.') + } + const passwordErrors = validatePassword(newPasswordVal) + if (passwordErrors.length > 0) { + validationErrors.push(...passwordErrors) + } + if (validationErrors.length > 0) { + setAuthErrors(validationErrors) + return + } + const body = { + user: { + email: emailVal, + tempToken: tokenVal, + password: newPasswordVal + } + } + setLoading(true) + try { + const updateResponse = await accountApi.resetPassword(body) + setAuthErrors([]) + setLoading(false) + if (updateResponse.data) { + enqueueSnackbar({ + message: 'Password reset successful', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + setEmailVal('') + setTokenVal('') + setNewPasswordVal('') + setConfirmPasswordVal('') + goLogin() + } + } catch (error) { + setLoading(false) + setAuthErrors([typeof error.response.data === 'object' ? 
error.response.data.message : error.response.data]) + enqueueSnackbar({ + message: `Failed to reset password!`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + } + } + + return ( + <> + + + {authErrors && authErrors.length > 0 && ( + } variant='filled' severity='error'> +
      + {authErrors.map((msg, key) => ( +
    • {msg}
    • + ))} +
    +
    + )} + + Reset Password + + + Back to Login + + . + + +
    + + +
    + + Email * + + +
    +
    + setEmailVal(newValue)} + value={emailVal} + showDialog={false} + /> +
    + +
    + + Reset Token * + +
    +
    + setTokenVal(e.target.value)} + value={tokenVal} + sx={{ mt: '8px' }} + /> + + Please copy the token you received in your email. + +
    + +
    + + New Password * + + +
    +
    + setNewPasswordVal(newValue)} + value={newPasswordVal} + showDialog={false} + /> + + + Password must be at least 8 characters long and contain at least one lowercase letter, one uppercase + letter, one digit, and one special character. + + +
    + +
    + + Confirm Password * + +
    +
    + setConfirmPasswordVal(newValue)} + value={confirmPasswordVal} + showDialog={false} + /> + + Confirm your new password. Must match the password typed above. + +
    + + + Update Password + +
    +
    +
    +
    + {loading && } + + ) +} + +export default ResetPasswordPage diff --git a/packages/ui/src/views/auth/signIn.jsx b/packages/ui/src/views/auth/signIn.jsx new file mode 100644 index 000000000..1e0a7d3cf --- /dev/null +++ b/packages/ui/src/views/auth/signIn.jsx @@ -0,0 +1,339 @@ +import { useEffect, useState } from 'react' +import { useSelector } from 'react-redux' +import { Link, useLocation, useNavigate } from 'react-router-dom' + +// material-ui +import { Stack, useTheme, Typography, Box, Alert, Button, Divider, Icon } from '@mui/material' +import { IconExclamationCircle } from '@tabler/icons-react' +import { LoadingButton } from '@mui/lab' + +// project imports +import MainCard from '@/ui-component/cards/MainCard' +import { Input } from '@/ui-component/input/Input' + +// Hooks +import useApi from '@/hooks/useApi' +import { useConfig } from '@/store/context/ConfigContext' + +// API +import authApi from '@/api/auth' +import accountApi from '@/api/account.api' +import loginMethodApi from '@/api/loginmethod' +import ssoApi from '@/api/sso' + +// utils +import useNotifier from '@/utils/useNotifier' + +// store +import { loginSuccess, logoutSuccess } from '@/store/reducers/authSlice' +import { store } from '@/store' + +// icons +import AzureSSOLoginIcon from '@/assets/images/microsoft-azure.svg' +import GoogleSSOLoginIcon from '@/assets/images/google.svg' +import Auth0SSOLoginIcon from '@/assets/images/auth0.svg' +import GithubSSOLoginIcon from '@/assets/images/github.svg' + +// ==============================|| SignInPage ||============================== // + +const SignInPage = () => { + const theme = useTheme() + useSelector((state) => state.customization) + useNotifier() + const { isEnterpriseLicensed, isCloud, isOpenSource } = useConfig() + + const usernameInput = { + label: 'Username', + name: 'username', + type: 'email', + placeholder: 'user@company.com' + } + const passwordInput = { + label: 'Password', + name: 'password', + type: 'password', + placeholder: 
'********' + } + const [usernameVal, setUsernameVal] = useState('') + const [passwordVal, setPasswordVal] = useState('') + const [configuredSsoProviders, setConfiguredSsoProviders] = useState([]) + const [authError, setAuthError] = useState(undefined) + const [loading, setLoading] = useState(false) + const [showResendButton, setShowResendButton] = useState(false) + const [successMessage, setSuccessMessage] = useState('') + + const loginApi = useApi(authApi.login) + const ssoLoginApi = useApi(ssoApi.ssoLogin) + const getDefaultProvidersApi = useApi(loginMethodApi.getDefaultLoginMethods) + const navigate = useNavigate() + const location = useLocation() + const resendVerificationApi = useApi(accountApi.resendVerificationEmail) + + const doLogin = (event) => { + event.preventDefault() + setLoading(true) + const body = { + email: usernameVal, + password: passwordVal + } + loginApi.request(body) + } + + useEffect(() => { + if (loginApi.error) { + setLoading(false) + if (loginApi.error.response.status === 401 && loginApi.error.response.data.redirectUrl) { + window.location.href = loginApi.error.response.data.data.redirectUrl + } else { + setAuthError(loginApi.error.response.data.message) + } + } + }, [loginApi.error]) + + useEffect(() => { + store.dispatch(logoutSuccess()) + if (!isOpenSource) { + getDefaultProvidersApi.request() + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []) + + useEffect(() => { + // Parse the "user" query parameter from the URL + const queryParams = new URLSearchParams(location.search) + const errorData = queryParams.get('error') + if (!errorData) return + const parsedErrorData = JSON.parse(decodeURIComponent(errorData)) + setAuthError(parsedErrorData.message) + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [location.search]) + + useEffect(() => { + if (loginApi.data) { + setLoading(false) + store.dispatch(loginSuccess(loginApi.data)) + navigate(location.state?.path || '/') + //navigate(0) + } + + // 
eslint-disable-next-line react-hooks/exhaustive-deps + }, [loginApi.data]) + + useEffect(() => { + if (ssoLoginApi.data) { + store.dispatch(loginSuccess(ssoLoginApi.data)) + navigate(location.state?.path || '/') + } + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [ssoLoginApi.data]) + + useEffect(() => { + if (ssoLoginApi.error) { + if (ssoLoginApi.error?.response?.status === 401 && ssoLoginApi.error?.response?.data.redirectUrl) { + window.location.href = ssoLoginApi.error.response.data.redirectUrl + } else { + setAuthError(ssoLoginApi.error.message) + } + } + }, [ssoLoginApi.error]) + + useEffect(() => { + if (getDefaultProvidersApi.data && getDefaultProvidersApi.data.providers) { + //data is an array of objects, store only the provider attribute + setConfiguredSsoProviders(getDefaultProvidersApi.data.providers.map((provider) => provider)) + } + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getDefaultProvidersApi.data]) + + useEffect(() => { + if (authError === 'User Email Unverified') { + setShowResendButton(true) + } else { + setShowResendButton(false) + } + }, [authError]) + + const signInWithSSO = (ssoProvider) => { + window.location.href = `/api/v1/${ssoProvider}/login` + } + + const handleResendVerification = async () => { + try { + await resendVerificationApi.request({ email: usernameVal }) + setAuthError(undefined) + setSuccessMessage('Verification email has been sent successfully.') + setShowResendButton(false) + } catch (error) { + setAuthError(error.response?.data?.message || 'Failed to send verification email.') + } + } + + return ( + <> + + + {successMessage && ( + setSuccessMessage('')}> + {successMessage} + + )} + {authError && ( + } variant='filled' severity='error'> + {authError} + + )} + {showResendButton && ( + + + + )} + + Sign In + {isCloud && ( + + Don't have an account?{' '} + + Sign up for free + + . + + )} + {isEnterpriseLicensed && ( + + Have an invite code?{' '} + + Sign up for an account + + . 
+ + )} + +
    + + +
    + + Email * + +
    +
    + setUsernameVal(newValue)} + value={usernameVal} + showDialog={false} + /> +
    + +
    + + Password * + +
    +
    + setPasswordVal(newValue)} value={passwordVal} /> + + + Forgot password? + + +
    + + Login + + {configuredSsoProviders && configuredSsoProviders.length > 0 && OR} + {configuredSsoProviders && + configuredSsoProviders.map( + (ssoProvider) => + //https://learn.microsoft.com/en-us/entra/identity-platform/howto-add-branding-in-apps + ssoProvider === 'azure' && ( + + ) + )} + {configuredSsoProviders && + configuredSsoProviders.map( + (ssoProvider) => + ssoProvider === 'google' && ( + + ) + )} + {configuredSsoProviders && + configuredSsoProviders.map( + (ssoProvider) => + ssoProvider === 'auth0' && ( + + ) + )} + {configuredSsoProviders && + configuredSsoProviders.map( + (ssoProvider) => + ssoProvider === 'github' && ( + + ) + )} +
    +
    +
    +
    + + ) +} + +export default SignInPage diff --git a/packages/ui/src/views/auth/ssoConfig.jsx b/packages/ui/src/views/auth/ssoConfig.jsx new file mode 100644 index 000000000..b57a42eeb --- /dev/null +++ b/packages/ui/src/views/auth/ssoConfig.jsx @@ -0,0 +1,1028 @@ +import { useEffect, useState } from 'react' +import { useDispatch, useSelector } from 'react-redux' +import { closeSnackbar as closeSnackbarAction, enqueueSnackbar as enqueueSnackbarAction } from '@/store/actions' + +// material-ui +import { Popover, IconButton, Stack, Typography, Box, OutlinedInput, Button, Tabs, Tab, Divider } from '@mui/material' +import { useTheme } from '@mui/material/styles' + +// project imports +import MainCard from '@/ui-component/cards/MainCard' +import ViewHeader from '@/layout/MainLayout/ViewHeader' +import ErrorBoundary from '@/ErrorBoundary' +import { BackdropLoader } from '@/ui-component/loading/BackdropLoader' +import { SwitchInput } from '@/ui-component/switch/Switch' +import { PermissionButton, StyledPermissionButton } from '@/ui-component/button/RBACButtons' +import { TabPanel } from '@/ui-component/tabs/TabPanel' + +// API +import loginMethodApi from '@/api/loginmethod' +import useApi from '@/hooks/useApi' + +// utils +import useNotifier from '@/utils/useNotifier' +import { useError } from '@/store/context/ErrorContext' + +// Icons +import { IconAlertTriangle, IconX, IconCopy } from '@tabler/icons-react' +import MicrosoftSVG from '@/assets/images/microsoft-azure.svg' +import GoogleSVG from '@/assets/images/google.svg' +import Auth0SVG from '@/assets/images/auth0.svg' +import GithubSVG from '@/assets/images/github.svg' + +// const +import { gridSpacing } from '@/store/constant' + +const SSOConfigPage = () => { + useNotifier() + const { error, setError } = useError() + const theme = useTheme() + + const dispatch = useDispatch() + const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) + const closeSnackbar = (...args) => 
dispatch(closeSnackbarAction(...args)) + + const [azureConfigEnabled, setAzureConfigEnabled] = useState(false) + const [azureTenantID, setAzureTenantID] = useState('') + const [azureClientID, setAzureClientID] = useState('') + const [azureClientSecret, setAzureClientSecret] = useState('') + const [azureCallbackURL, setAzureCallbackURL] = useState('') + + const [googleConfigEnabled, setGoogleConfigEnabled] = useState(false) + const [googleClientID, setGoogleClientID] = useState('') + const [googleClientSecret, setGoogleClientSecret] = useState('') + const [googleCallbackURL, setGoogleCallbackURL] = useState('') + + const [githubConfigEnabled, setGithubConfigEnabled] = useState(false) + const [githubClientID, setGithubClientID] = useState('') + const [githubClientSecret, setGithubClientSecret] = useState('') + const [githubCallbackURL, setGithubCallbackURL] = useState('') + + const [auth0ConfigEnabled, setAuth0ConfigEnabled] = useState(false) + const [auth0Domain, setAuth0Domain] = useState('') + const [auth0ClientID, setAuth0ClientID] = useState('') + const [auth0ClientSecret, setAuth0ClientSecret] = useState('') + const [auth0CallbackURL, setAuth0CallbackURL] = useState('') + + const [loading, setLoading] = useState(false) + const [authErrors, setAuthErrors] = useState([]) + + const getLoginMethodsApi = useApi(loginMethodApi.getLoginMethods) + const [tabValue, setTabValue] = useState(0) + + const [copyAnchorEl, setCopyAnchorEl] = useState(null) + const openCopyPopOver = Boolean(copyAnchorEl) + + const currentUser = useSelector((state) => state.auth.user) + + const handleCloseCopyPopOver = () => { + setCopyAnchorEl(null) + } + + const validateAzureFields = (validationErrors) => { + if (!azureTenantID) { + validationErrors.push('Azure TenantID cannot be left blank!') + } + if (!azureClientID) { + validationErrors.push('Azure ClientID cannot be left blank!') + } + if (!azureClientSecret) { + validationErrors.push('Azure Client Secret cannot be left blank!') + } + } + 
const validateGoogleFields = (validationErrors) => { + if (!googleClientID) { + validationErrors.push('Google ClientID cannot be left blank!') + } + if (!googleClientSecret) { + validationErrors.push('Google Client Secret cannot be left blank!') + } + } + + const validateGithubFields = (validationErrors) => { + if (!githubClientID) { + validationErrors.push('Github ClientID cannot be left blank!') + } + if (!githubClientSecret) { + validationErrors.push('Github Client Secret cannot be left blank!') + } + } + + const validateAuth0Fields = (validationErrors) => { + if (!auth0Domain) { + validationErrors.push('Auth0 Domain cannot be left blank!') + } + if (!auth0ClientID) { + validationErrors.push('Auth0 ClientID cannot be left blank!') + } + if (!auth0ClientSecret) { + validationErrors.push('Auth0 Client Secret cannot be left blank!') + } + } + + const validateFields = () => { + const validationErrors = [] + setAuthErrors([]) + if (azureConfigEnabled) { + validateAzureFields(validationErrors) + } + if (googleConfigEnabled) { + validateGoogleFields(validationErrors) + } + if (auth0ConfigEnabled) { + validateAuth0Fields(validationErrors) + } + if (githubConfigEnabled) { + validateGithubFields(validationErrors) + } + return validationErrors + } + + function constructRequestBody() { + const body = { + organizationId: currentUser.activeOrganizationId, + userId: currentUser.id, + providers: [ + { + providerLabel: 'Microsoft', + providerName: 'azure', + config: { + tenantID: azureTenantID, + clientID: azureClientID, + clientSecret: azureClientSecret + }, + status: azureConfigEnabled ? 'enable' : 'disable' + }, + { + providerLabel: 'Google', + providerName: 'google', + config: { + clientID: googleClientID, + clientSecret: googleClientSecret + }, + status: googleConfigEnabled ? 
'enable' : 'disable' + }, + { + providerLabel: 'Auth0', + providerName: 'auth0', + config: { + domain: auth0Domain, + clientID: auth0ClientID, + clientSecret: auth0ClientSecret + }, + status: auth0ConfigEnabled ? 'enable' : 'disable' + }, + { + providerLabel: 'Github', + providerName: 'github', + config: { + clientID: githubClientID, + clientSecret: githubClientSecret + }, + status: githubConfigEnabled ? 'enable' : 'disable' + } + ] + } + return body + } + + const validateAndSubmit = async () => { + const validationErrors = validateFields() + if (validationErrors.length > 0) { + setAuthErrors(validationErrors) + return + } + setLoading(true) + try { + const updateResponse = await loginMethodApi.updateLoginMethods(constructRequestBody()) + setAuthErrors([]) + setLoading(false) + if (updateResponse.data) { + enqueueSnackbar({ + message: 'SSO Configuration Updated!', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + } + } catch (error) { + setLoading(false) + setAuthErrors([typeof error.response.data === 'object' ? 
error.response.data.message : error.response.data]) + enqueueSnackbar({ + message: `Failed to update SSO Configuration.`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + } + } + + const validateAndTest = async (providerName) => { + let validationErrors = [] + switch (providerName) { + case 'Azure': + validateAzureFields(validationErrors) + break + case 'Google': + validateGoogleFields(validationErrors) + break + case 'Auth0': + validateAuth0Fields(validationErrors) + break + case 'Gtihub': + validateGithubFields(validationErrors) + break + } + if (validationErrors.length > 0) { + setAuthErrors(validationErrors) + return + } + const body = constructRequestBody() + // depending on the tab value, we need to set the provider name and remove the other provider + body.providers = [body.providers[tabValue]] + body.providerName = providerName.toLowerCase() + setLoading(true) + try { + const updateResponse = await loginMethodApi.testLoginMethod(body) + setAuthErrors([]) + setLoading(false) + if (updateResponse.data?.message) { + enqueueSnackbar({ + message: `${getSelectedProviderName()} SSO Configuration is Valid!`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + } + if (updateResponse.data.error) { + enqueueSnackbar({ + message: `${updateResponse.data.error}`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + } + } catch (error) { + setLoading(false) + setAuthErrors([typeof error.response.data === 'object' ? 
error.response.data.message : error.response.data]) + enqueueSnackbar({ + message: `Failed to verify ${getSelectedProviderName()} SSO Configuration.`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + } + } + + const handleAzureChange = (value) => { + setAzureConfigEnabled(value) + } + + const handleGoogleChange = (value) => { + setGoogleConfigEnabled(value) + } + + const handleAuth0Change = (value) => { + setAuth0ConfigEnabled(value) + } + + const handleGithubChange = (value) => { + setGithubConfigEnabled(value) + } + + const getSelectedProviderName = () => { + switch (tabValue) { + case 0: + return 'Azure' + case 1: + return 'Google' + case 2: + return 'Auth0' + case 3: + return 'Github' + } + } + + useEffect(() => { + if (getLoginMethodsApi.data) { + const data = getLoginMethodsApi.data + const azureConfig = data.providers.find((provider) => provider.name === 'azure') + const azureCallback = data.callbacks.find((callback) => callback.providerName === 'azure') + if (azureCallback) { + setAzureCallbackURL(azureCallback.callbackURL) + } + if (azureConfig) { + setAzureTenantID(azureConfig.config.tenantID) + setAzureClientID(azureConfig.config.clientID) + setAzureClientSecret(azureConfig.config.clientSecret) + setAzureConfigEnabled(azureConfig.status === 'enable') + } + const googleConfig = data.providers.find((provider) => provider.name === 'google') + const googleCallback = data.callbacks.find((callback) => callback.providerName === 'google') + if (googleCallback) { + setGoogleCallbackURL(googleCallback.callbackURL) + } + if (googleConfig) { + setGoogleClientID(googleConfig.config.clientID) + setGoogleClientSecret(googleConfig.config.clientSecret) + setGoogleConfigEnabled(googleConfig.status === 'enable') + } + const auth0Config = data.providers.find((provider) => provider.name === 'auth0') + const auth0Callback = data.callbacks.find((callback) => callback.providerName === 'auth0') + 
if (auth0Callback) { + setAuth0CallbackURL(auth0Callback.callbackURL) + } + + if (auth0Config) { + setAuth0Domain(auth0Config.config.domain) + setAuth0ClientID(auth0Config.config.clientID) + setAuth0ClientSecret(auth0Config.config.clientSecret) + setAuth0ConfigEnabled(auth0Config.status === 'enable') + } + + const githubConfig = data.providers.find((provider) => provider.name === 'github') + const githubCallback = data.callbacks.find((callback) => callback.providerName === 'github') + if (githubCallback) { + setGithubCallbackURL(githubCallback.callbackURL) + } + if (githubConfig) { + setGithubClientID(githubConfig.config.clientID) + setGithubClientSecret(githubConfig.config.clientSecret) + setGithubConfigEnabled(githubConfig.status === 'enable') + } + setLoading(false) + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getLoginMethodsApi.data]) + + useEffect(() => { + if (getLoginMethodsApi.error) { + setLoading(false) + setError(getLoginMethodsApi.error) + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getLoginMethodsApi.error]) + + useEffect(() => { + setLoading(true) + getLoginMethodsApi.request(currentUser.activeOrganizationId) + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []) + + return ( + <> + + {error ? ( + + ) : ( + + + {authErrors && authErrors.length > 0 && ( +
    + + + + + +
      + {authErrors.map((msg, key) => ( + +
    • {msg}
    • +
      + ))} +
    +
    +
    +
    + )} + setTabValue(val)} aria-label='tabs'> + } + sx={{ + minHeight: '40px', + height: '40px', + display: 'flex', + alignItems: 'center', + mb: 1 + }} + value={0} + label={ + <> + Microsoft + {azureConfigEnabled && ( +
    +
    +
    + )} + + } + /> + } + sx={{ + minHeight: '40px', + height: '40px', + display: 'flex', + alignItems: 'center', + mb: 1 + }} + value={1} + label={ + <> + Google + {googleConfigEnabled && ( +
    +
    +
    + )} + + } + /> + } + sx={{ + minHeight: '40px', + height: '40px', + display: 'flex', + alignItems: 'center', + mb: 1 + }} + value={2} + label={ + <> + Auth0 + {auth0ConfigEnabled && ( +
    +
    +
    + )} + + } + /> + } + sx={{ + minHeight: '40px', + height: '40px', + display: 'flex', + alignItems: 'center', + mb: 1 + }} + value={3} + label={ + <> + Github + {githubConfigEnabled && ( +
    +
    +
    + )} + + } + /> + + + + + Enable SSO Login + + + + + + {azureCallbackURL} + + { + navigator.clipboard.writeText(azureCallbackURL) + setCopyAnchorEl(event.currentTarget) + setTimeout(() => { + handleCloseCopyPopOver() + }, 1500) + }} + > + + + + + +
    + Tenant ID +
    +
    + setAzureTenantID(e.target.value)} + value={azureTenantID} + /> +
    + +
    + + Client ID * + +
    +
    + setAzureClientID(e.target.value)} + value={azureClientID} + /> +
    + +
    + + Client Secret * + +
    +
    + setAzureClientSecret(e.target.value)} + value={azureClientSecret} + /> +
    +
    +
    + + + + Enable SSO Login + + + + + + {googleCallbackURL} + + { + navigator.clipboard.writeText(googleCallbackURL) + setCopyAnchorEl(event.currentTarget) + setTimeout(() => { + handleCloseCopyPopOver() + }, 1500) + }} + > + + + + + +
    + + Client ID * + +
    +
    + setGoogleClientID(e.target.value)} + value={googleClientID} + /> +
    + +
    + + Client Secret * + +
    +
    + setGoogleClientSecret(e.target.value)} + value={googleClientSecret} + /> +
    +
    +
    + + + + Enable SSO Login + + + + + + {auth0CallbackURL} + + { + navigator.clipboard.writeText(auth0CallbackURL) + setCopyAnchorEl(event.currentTarget) + setTimeout(() => { + handleCloseCopyPopOver() + }, 1500) + }} + > + + + + + +
    + Auth0 Domain +
    +
    + setAuth0Domain(e.target.value)} + value={auth0Domain} + /> +
    + +
    + + Client ID * + +
    +
    + setAuth0ClientID(e.target.value)} + value={auth0ClientID} + /> +
    + +
    + + Client Secret * + +
    +
    + setAuth0ClientSecret(e.target.value)} + value={auth0ClientSecret} + /> +
    +
    +
    + + + + Enable SSO Login + + + + + + {githubCallbackURL} + + { + navigator.clipboard.writeText(githubCallbackURL) + setCopyAnchorEl(event.currentTarget) + setTimeout(() => { + handleCloseCopyPopOver() + }, 1500) + }} + > + + + + + +
    + + Client ID * + +
    +
    + setGithubClientID(e.target.value)} + value={githubClientID} + /> +
    + +
    + + Client Secret * + +
    +
    + setGithubClientSecret(e.target.value)} + value={githubClientSecret} + /> +
    +
    +
    + + + + validateAndTest(getSelectedProviderName())} + > + {'Test ' + getSelectedProviderName() + ' Configuration'} + + + validateAndSubmit()} + > + Save + + + + )} + + {loading && } + + + Copied! + + + + ) +} + +export default SSOConfigPage diff --git a/packages/ui/src/views/auth/ssoSuccess.jsx b/packages/ui/src/views/auth/ssoSuccess.jsx new file mode 100644 index 000000000..9476aa646 --- /dev/null +++ b/packages/ui/src/views/auth/ssoSuccess.jsx @@ -0,0 +1,46 @@ +import { useEffect } from 'react' +import { useLocation, useNavigate } from 'react-router-dom' +import { store } from '@/store' +import { loginSuccess } from '@/store/reducers/authSlice' +import authApi from '@/api/auth' + +const SSOSuccess = () => { + const location = useLocation() + const navigate = useNavigate() + + useEffect(() => { + const run = async () => { + const queryParams = new URLSearchParams(location.search) + const token = queryParams.get('token') + + if (token) { + try { + const user = await authApi.ssoSuccess(token) + if (user) { + if (user.status === 200) { + store.dispatch(loginSuccess(user.data)) + navigate('/') + } else { + navigate('/login') + } + } else { + navigate('/login') + } + } catch (error) { + navigate('/login') + } + } + } + run() + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [location.search]) + + return ( +
    +

    Loading dashboard...

    +

    Loading data...

    +
    + ) +} + +export default SSOSuccess diff --git a/packages/ui/src/views/auth/unauthorized.jsx b/packages/ui/src/views/auth/unauthorized.jsx new file mode 100644 index 000000000..492007741 --- /dev/null +++ b/packages/ui/src/views/auth/unauthorized.jsx @@ -0,0 +1,60 @@ +import MainCard from '@/ui-component/cards/MainCard' +import { Box, Stack, Typography } from '@mui/material' +import unauthorizedSVG from '@/assets/images/unauthorized.svg' +import { StyledButton } from '@/ui-component/button/StyledButton' +import { Link } from 'react-router-dom' +import { useSelector } from 'react-redux' + +// ==============================|| UnauthorizedPage ||============================== // + +const UnauthorizedPage = () => { + const currentUser = useSelector((state) => state.auth.user) + + return ( + <> + + + + + unauthorizedSVG + + + 403 Forbidden + + + You do not have permission to access this page. + + {currentUser ? ( + + Back to Home + + ) : ( + + Back to Login + + )} + + + + + ) +} + +export default UnauthorizedPage diff --git a/packages/ui/src/views/auth/verify-email.jsx b/packages/ui/src/views/auth/verify-email.jsx new file mode 100644 index 000000000..1a5d05074 --- /dev/null +++ b/packages/ui/src/views/auth/verify-email.jsx @@ -0,0 +1,124 @@ +import { useEffect } from 'react' +import { useNavigate, useSearchParams } from 'react-router-dom' + +// material-ui +import { Stack, Typography, Box, useTheme, CircularProgress } from '@mui/material' + +// project imports +import MainCard from '@/ui-component/cards/MainCard' + +// API +import accountApi from '@/api/account.api' + +// Hooks +import useApi from '@/hooks/useApi' + +// icons +import { IconCheck } from '@tabler/icons-react' +import { useState } from 'react' +import { IconX } from '@tabler/icons-react' + +const VerifyEmail = () => { + const accountVerifyApi = useApi(accountApi.verifyAccountEmail) + + const [searchParams] = useSearchParams() + const [loading, setLoading] = useState(false) + const [verificationError, 
setVerificationError] = useState('') + const [verificationSuccess, setVerificationSuccess] = useState(false) + const navigate = useNavigate() + + const theme = useTheme() + + useEffect(() => { + if (accountVerifyApi.data) { + setLoading(false) + setVerificationError('') + setVerificationSuccess(true) + setTimeout(() => { + navigate('/signin') + }, 3000) + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [accountVerifyApi.data]) + + useEffect(() => { + if (accountVerifyApi.error) { + setLoading(false) + setVerificationError(accountVerifyApi.error) + setVerificationSuccess(false) + } + }, [accountVerifyApi.error]) + + useEffect(() => { + const token = searchParams.get('token') + if (token) { + setLoading(true) + setVerificationError('') + setVerificationSuccess(false) + accountVerifyApi.request({ user: { tempToken: token } }) + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []) + + return ( + + + + + {loading && ( + <> + + Verifying Email... + + )} + {verificationError && ( + <> + + + + Verification Failed. + + )} + {verificationSuccess && ( + <> + + + + Email Verified Successfully. + + )} + + + + + ) +} + +export default VerifyEmail diff --git a/packages/ui/src/views/canvas/AddNodes.jsx b/packages/ui/src/views/canvas/AddNodes.jsx index 35ccd81f8..903747a0d 100644 --- a/packages/ui/src/views/canvas/AddNodes.jsx +++ b/packages/ui/src/views/canvas/AddNodes.jsx @@ -116,17 +116,111 @@ const AddNodes = ({ nodesData, node, isAgentCanvas, isAgentflowv2, onFlowGenerat return nodes } + // Fuzzy search utility function that calculates similarity score + const fuzzyScore = (searchTerm, text) => { + const search = ((searchTerm ?? '') + '').trim().toLowerCase() + if (!search) return 0 + const target = ((text ?? 
'') + '').toLowerCase() + + let score = 0 + let searchIndex = 0 + let firstMatchIndex = -1 + let lastMatchIndex = -1 + let consecutiveMatches = 0 + + // Check for exact substring match + const exactMatchIndex = target.indexOf(search) + if (exactMatchIndex !== -1) { + score = 1000 + // Bonus for match at start of string + if (exactMatchIndex === 0) { + score += 200 + } + // Bonus for match at start of word + else if (target[exactMatchIndex - 1] === ' ' || target[exactMatchIndex - 1] === '-' || target[exactMatchIndex - 1] === '_') { + score += 100 + } + // Penalty for how far into the string the match is + score -= exactMatchIndex * 2 + // Penalty for length difference (shorter target = better match) + score -= (target.length - search.length) * 3 + return score + } + + // Fuzzy matching with character-by-character scoring + for (let i = 0; i < target.length && searchIndex < search.length; i++) { + if (target[i] === search[searchIndex]) { + // Base score for character match + score += 10 + + // Bonus for consecutive matches + if (lastMatchIndex === i - 1) { + consecutiveMatches++ + score += 5 + consecutiveMatches * 2 // Increasing bonus for longer sequences + } else { + consecutiveMatches = 0 + } + + // Bonus for match at start of string + if (i === 0) { + score += 20 + } + + // Bonus for match after space or special character (word boundary) + if (i > 0 && (target[i - 1] === ' ' || target[i - 1] === '-' || target[i - 1] === '_')) { + score += 15 + } + + if (firstMatchIndex === -1) firstMatchIndex = i + lastMatchIndex = i + searchIndex++ + } + } + + // Return 0 if not all characters were matched + if (searchIndex < search.length) { + return 0 + } + + // Penalty for length difference (favor shorter targets) + score -= Math.max(0, target.length - search.length) * 2 + // Penalty for gaps between first/last matched span + const span = lastMatchIndex - firstMatchIndex + 1 + const gaps = Math.max(0, span - search.length) + score -= gaps * 3 + + return score + } + + // Score 
and sort nodes by fuzzy search relevance + const scoreAndSortNodes = (nodes, searchValue) => { + // Return all nodes unsorted if search is empty + if (!searchValue || searchValue.trim() === '') { + return nodes + } + + // Calculate fuzzy scores for each node + const nodesWithScores = nodes.map((nd) => { + const nameScore = fuzzyScore(searchValue, nd.name) + const labelScore = fuzzyScore(searchValue, nd.label) + const categoryScore = fuzzyScore(searchValue, nd.category) * 0.5 // Lower weight for category + const maxScore = Math.max(nameScore, labelScore, categoryScore) + + return { node: nd, score: maxScore } + }) + + // Filter nodes with score > 0 and sort by score (highest first) + return nodesWithScores + .filter((item) => item.score > 0) + .sort((a, b) => b.score - a.score) + .map((item) => item.node) + } + const getSearchedNodes = (value) => { if (isAgentCanvas) { const nodes = nodesData.filter((nd) => !blacklistCategoriesForAgentCanvas.includes(nd.category)) nodes.push(...addException()) - const passed = nodes.filter((nd) => { - const passesName = nd.name.toLowerCase().includes(value.toLowerCase()) - const passesLabel = nd.label.toLowerCase().includes(value.toLowerCase()) - const passesCategory = nd.category.toLowerCase().includes(value.toLowerCase()) - return passesName || passesCategory || passesLabel - }) - return passed + return scoreAndSortNodes(nodes, value) } let nodes = nodesData.filter((nd) => nd.category !== 'Multi Agents' && nd.category !== 'Sequential Agents') @@ -135,13 +229,7 @@ const AddNodes = ({ nodesData, node, isAgentCanvas, isAgentflowv2, onFlowGenerat nodes = nodes.filter((nd) => !nodeNames.includes(nd.name)) } - const passed = nodes.filter((nd) => { - const passesName = nd.name.toLowerCase().includes(value.toLowerCase()) - const passesLabel = nd.label.toLowerCase().includes(value.toLowerCase()) - const passesCategory = nd.category.toLowerCase().includes(value.toLowerCase()) - return passesName || passesCategory || passesLabel - }) - 
return passed + return scoreAndSortNodes(nodes, value) } const filterSearch = (value, newTabValue) => { diff --git a/packages/ui/src/views/canvas/CanvasHeader.jsx b/packages/ui/src/views/canvas/CanvasHeader.jsx index 416e7ba9f..85dccf451 100644 --- a/packages/ui/src/views/canvas/CanvasHeader.jsx +++ b/packages/ui/src/views/canvas/CanvasHeader.jsx @@ -19,6 +19,7 @@ import ChatflowConfigurationDialog from '@/ui-component/dialog/ChatflowConfigura import UpsertHistoryDialog from '@/views/vectorstore/UpsertHistoryDialog' import ViewLeadsDialog from '@/ui-component/dialog/ViewLeadsDialog' import ExportAsTemplateDialog from '@/ui-component/dialog/ExportAsTemplateDialog' +import { Available } from '@/ui-component/rbac/available' // API import chatflowsApi from '@/api/chatflows' @@ -60,6 +61,8 @@ const CanvasHeader = ({ chatflow, isAgentCanvas, isAgentflowV2, handleSaveFlow, const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) + const [savePermission, setSavePermission] = useState(isAgentCanvas ? 'agentflows:create' : 'chatflows:create') + const title = isAgentCanvas ? 'Agents' : 'Chatflow' const updateChatflowApi = useApi(chatflowsApi.updateChatflow) @@ -73,7 +76,8 @@ const CanvasHeader = ({ chatflow, isAgentCanvas, isAgentflowV2, handleSaveFlow, } else if (setting === 'viewMessages') { setViewMessagesDialogProps({ title: 'View Messages', - chatflow: chatflow + chatflow: chatflow, + isChatflow: isAgentflowV2 ? 
false : true }) setViewMessagesDialogOpen(true) } else if (setting === 'viewLeads') { @@ -203,7 +207,8 @@ const CanvasHeader = ({ chatflow, isAgentCanvas, isAgentflowV2, handleSaveFlow, chatflowApiKeyId: chatflow.apikeyid, isFormDataRequired, isSessionMemory, - isAgentCanvas + isAgentCanvas, + isAgentflowV2 }) setAPIDialogOpen(true) } @@ -215,12 +220,14 @@ const CanvasHeader = ({ chatflow, isAgentCanvas, isAgentflowV2, handleSaveFlow, const onConfirmSaveName = (flowName) => { setFlowDialogOpen(false) + setSavePermission(isAgentCanvas ? 'agentflows:update' : 'chatflows:update') handleSaveFlow(flowName) } useEffect(() => { if (updateChatflowApi.data) { setFlowName(updateChatflowApi.data.name) + setSavePermission(isAgentCanvas ? 'agentflows:update' : 'chatflows:update') dispatch({ type: SET_CHATFLOW, chatflow: updateChatflowApi.data }) } setEditingFlowName(false) @@ -289,27 +296,29 @@ const CanvasHeader = ({ chatflow, isAgentCanvas, isAgentflowV2, handleSaveFlow, {canvas.isDirty && *} {flowName} {chatflow?.id && ( - - setEditingFlowName(true)} - > - - - + + + setEditingFlowName(true)} + > + + + + )} ) : ( @@ -401,26 +410,28 @@ const CanvasHeader = ({ chatflow, isAgentCanvas, isAgentflowV2, handleSaveFlow, )} - - - - - + + + + + + + { componentNode?.deprecateMessage ?? 'This node will be deprecated in the next release. 
Change to a new node tagged with NEW' ) + } else if (componentNode.warning) { + setWarningMessage(componentNode.warning) } else { setWarningMessage('') } diff --git a/packages/ui/src/views/canvas/CredentialInputHandler.jsx b/packages/ui/src/views/canvas/CredentialInputHandler.jsx index ddbd83152..e713a22d1 100644 --- a/packages/ui/src/views/canvas/CredentialInputHandler.jsx +++ b/packages/ui/src/views/canvas/CredentialInputHandler.jsx @@ -12,6 +12,7 @@ import CredentialListDialog from '@/views/credentials/CredentialListDialog' // API import credentialsApi from '@/api/credentials' +import { useAuth } from '@/hooks/useAuth' import { FLOWISE_CREDENTIAL_ID } from '@/store/constant' // ===========================|| CredentialInputHandler ||=========================== // @@ -24,6 +25,7 @@ const CredentialInputHandler = ({ inputParam, data, onSelect, disabled = false } const [showSpecificCredentialDialog, setShowSpecificCredentialDialog] = useState(false) const [specificCredentialDialogProps, setSpecificCredentialDialogProps] = useState({}) const [reloadTimestamp, setReloadTimestamp] = useState(Date.now().toString()) + const { hasPermission } = useAuth() const editCredential = (credentialId) => { const dialogProp = { @@ -104,7 +106,7 @@ const CredentialInputHandler = ({ inputParam, data, onSelect, disabled = false } name={inputParam.name} nodeData={data} value={credentialId ?? 
'choose an option'} - isCreateNewOption={true} + isCreateNewOption={hasPermission('credentials:create')} credentialNames={inputParam.credentialNames} onSelect={(newValue) => { setCredentialId(newValue) @@ -112,7 +114,7 @@ const CredentialInputHandler = ({ inputParam, data, onSelect, disabled = false } }} onCreateNew={() => addAsyncOption(inputParam.name)} /> - {credentialId && ( + {credentialId && hasPermission('credentials:update') && ( editCredential(credentialId)}> diff --git a/packages/ui/src/views/canvas/NodeInputHandler.jsx b/packages/ui/src/views/canvas/NodeInputHandler.jsx index bc3afa66a..04175e087 100644 --- a/packages/ui/src/views/canvas/NodeInputHandler.jsx +++ b/packages/ui/src/views/canvas/NodeInputHandler.jsx @@ -3,6 +3,8 @@ import { Handle, Position, useUpdateNodeInternals } from 'reactflow' import { useEffect, useRef, useState, useContext } from 'react' import { useSelector, useDispatch } from 'react-redux' import { cloneDeep } from 'lodash' +import showdown from 'showdown' +import parser from 'html-react-parser' // material-ui import { useTheme, styled } from '@mui/material/styles' @@ -98,6 +100,13 @@ const StyledPopper = styled(Popper)({ } }) +const markdownConverter = new showdown.Converter({ + simplifiedAutoLink: true, + strikethrough: true, + tables: true, + tasklists: true +}) + // ===========================|| NodeInputHandler ||=========================== // const NodeInputHandler = ({ @@ -958,7 +967,7 @@ const NodeInputHandler = ({ }} > - {inputParam.warning} + {parser(inputParam.warning)}
    )} {inputParam.type === 'credential' && ( @@ -1037,6 +1046,7 @@ const NodeInputHandler = ({ variant='outlined' onClick={() => { data.inputs[inputParam.name] = inputParam.codeExample + setReloadTimestamp(Date.now().toString()) }} > See Example @@ -1044,10 +1054,11 @@ const NodeInputHandler = ({ )}
    setPromptGeneratorDialogOpen(false)} onConfirm={(generatedInstruction) => { try { - data.inputs[inputParam.name] = generatedInstruction + if (inputParam?.acceptVariable && window.location.href.includes('v2/agentcanvas')) { + const htmlContent = markdownConverter.makeHtml(generatedInstruction) + data.inputs[inputParam.name] = htmlContent + } else { + data.inputs[inputParam.name] = generatedInstruction + } setPromptGeneratorDialogOpen(false) } catch (error) { enqueueSnackbar({ diff --git a/packages/ui/src/views/canvas/StickyNote.jsx b/packages/ui/src/views/canvas/StickyNote.jsx index accf1d487..938a18b78 100644 --- a/packages/ui/src/views/canvas/StickyNote.jsx +++ b/packages/ui/src/views/canvas/StickyNote.jsx @@ -3,7 +3,7 @@ import { useContext, useState, memo } from 'react' import { useSelector } from 'react-redux' // material-ui -import { useTheme } from '@mui/material/styles' +import { useTheme, darken, lighten } from '@mui/material/styles' // project imports import NodeCardWrapper from '@/ui-component/cards/NodeCardWrapper' @@ -18,6 +18,7 @@ import { flowContext } from '@/store/context/ReactFlowContext' const StickyNote = ({ data }) => { const theme = useTheme() const canvas = useSelector((state) => state.canvas) + const customization = useSelector((state) => state.customization) const { deleteNode, duplicateNode } = useContext(flowContext) const [inputParam] = data.inputParams @@ -31,12 +32,23 @@ const StickyNote = ({ data }) => { setOpen(true) } + const defaultColor = '#FFE770' // fallback color if data.color is not present + const nodeColor = data.color || defaultColor + const getBorderColor = () => { if (data.selected) return theme.palette.primary.main - else if (theme?.customization?.isDarkMode) return theme.palette.grey[900] + 25 + else if (customization?.isDarkMode) return theme.palette.grey[700] else return theme.palette.grey[900] + 50 } + const getBackgroundColor = () => { + if (customization?.isDarkMode) { + return data.selected ? 
darken(nodeColor, 0.7) : darken(nodeColor, 0.8) + } else { + return data.selected ? lighten(nodeColor, 0.1) : lighten(nodeColor, 0.2) + } + } + return ( <> { sx={{ padding: 0, borderColor: getBorderColor(), - backgroundColor: data.selected ? '#FFDC00' : '#FFE770' + backgroundColor: getBackgroundColor() }} border={false} > @@ -66,8 +78,12 @@ const StickyNote = ({ data }) => { onClick={() => { duplicateNode(data.id) }} - sx={{ height: '35px', width: '35px', '&:hover': { color: theme?.palette.primary.main } }} - color={theme?.customization?.isDarkMode ? theme.colors?.paper : 'inherit'} + sx={{ + height: '35px', + width: '35px', + color: customization?.isDarkMode ? 'white' : 'inherit', + '&:hover': { color: theme?.palette.primary.main } + }} > @@ -76,8 +92,12 @@ const StickyNote = ({ data }) => { onClick={() => { deleteNode(data.id) }} - sx={{ height: '35px', width: '35px', '&:hover': { color: 'red' } }} - color={theme?.customization?.isDarkMode ? theme.colors?.paper : 'inherit'} + sx={{ + height: '35px', + width: '35px', + color: customization?.isDarkMode ? 
'white' : 'inherit', + '&:hover': { color: theme?.palette.error.main } + }} > diff --git a/packages/ui/src/views/canvas/index.css b/packages/ui/src/views/canvas/index.css index 3ad926786..31e7edccb 100644 --- a/packages/ui/src/views/canvas/index.css +++ b/packages/ui/src/views/canvas/index.css @@ -47,3 +47,42 @@ cursor: crosshair; background: #5dba62 !important; } + +/* Dark mode controls styling */ +.dark-mode-controls { + --xy-controls-button-background-color-default: #2d2d2d; + --xy-controls-button-background-color-hover-default: #404040; + --xy-controls-button-border-color-default: #525252; + --xy-controls-box-shadow-default: 0 0 2px 1px rgba(255, 255, 255, 0.1); +} + +.dark-mode-controls .react-flow__controls-button { + background-color: #2d2d2d; + border-color: #525252; + color: #ffffff; + border: 1px solid #525252; +} + +.dark-mode-controls .react-flow__controls-button:hover { + background-color: #404040; +} + +.dark-mode-controls .react-flow__controls-button.react-flow__controls-interactive { + background-color: #2d2d2d; + border-color: #525252; + color: #ffffff; +} + +.dark-mode-controls .react-flow__controls-button.react-flow__controls-interactive:hover { + background-color: #404040; +} + +.dark-mode-controls .react-flow__controls-button svg { + color: #ffffff; + fill: #ffffff; +} + +.dark-mode-controls .react-flow__controls-button:hover svg { + color: #ffffff; + fill: #ffffff; +} diff --git a/packages/ui/src/views/canvas/index.jsx b/packages/ui/src/views/canvas/index.jsx index 028c3c462..8835706ec 100644 --- a/packages/ui/src/views/canvas/index.jsx +++ b/packages/ui/src/views/canvas/index.jsx @@ -35,9 +35,10 @@ import chatflowsApi from '@/api/chatflows' // Hooks import useApi from '@/hooks/useApi' import useConfirm from '@/hooks/useConfirm' +import { useAuth } from '@/hooks/useAuth' // icons -import { IconX, IconRefreshAlert } from '@tabler/icons-react' +import { IconX, IconRefreshAlert, IconMagnetFilled, IconMagnetOff, IconArtboard, IconArtboardOff } 
from '@tabler/icons-react' // utils import { @@ -62,6 +63,7 @@ const edgeTypes = { buttonedge: ButtonEdge } const Canvas = () => { const theme = useTheme() const navigate = useNavigate() + const { hasAssignedWorkspace } = useAuth() const { state } = useLocation() const templateFlowData = state ? state.templateFlowData : '' @@ -75,6 +77,7 @@ const Canvas = () => { const { confirm } = useConfirm() const dispatch = useDispatch() + const customization = useSelector((state) => state.customization) const canvas = useSelector((state) => state.canvas) const [canvasDataStore, setCanvasDataStore] = useState(canvas) const [chatflow, setChatflow] = useState(null) @@ -94,15 +97,22 @@ const Canvas = () => { const [selectedNode, setSelectedNode] = useState(null) const [isUpsertButtonEnabled, setIsUpsertButtonEnabled] = useState(false) const [isSyncNodesButtonEnabled, setIsSyncNodesButtonEnabled] = useState(false) + const [isSnappingEnabled, setIsSnappingEnabled] = useState(false) + const [isBackgroundEnabled, setIsBackgroundEnabled] = useState(true) const reactFlowWrapper = useRef(null) + const [lastUpdatedDateTime, setLasUpdatedDateTime] = useState('') + const [chatflowName, setChatflowName] = useState('') + const [flowData, setFlowData] = useState('') + // ==============================|| Chatflow API ||============================== // const getNodesApi = useApi(nodesApi.getAllNodes) const createNewChatflowApi = useApi(chatflowsApi.createNewChatflow) const updateChatflowApi = useApi(chatflowsApi.updateChatflow) const getSpecificChatflowApi = useApi(chatflowsApi.getSpecificChatflow) + const getHasChatflowChangedApi = useApi(chatflowsApi.getHasChatflowChanged) // ==============================|| Events & Actions ||============================== // @@ -198,7 +208,7 @@ const Canvas = () => { } } - const handleSaveFlow = (chatflowName) => { + const handleSaveFlow = async (chatflowName) => { if (reactFlowInstance) { const nodes = reactFlowInstance.getNodes().map((node) => { const 
nodeData = cloneDeep(node.data) @@ -227,11 +237,9 @@ const Canvas = () => { } createNewChatflowApi.request(newChatflowBody) } else { - const updateBody = { - name: chatflowName, - flowData - } - updateChatflowApi.request(chatflow.id, updateBody) + setChatflowName(chatflowName) + setFlowData(flowData) + getHasChatflowChangedApi.request(chatflow.id, lastUpdatedDateTime) } } } @@ -401,7 +409,13 @@ const Canvas = () => { useEffect(() => { if (getSpecificChatflowApi.data) { const chatflow = getSpecificChatflowApi.data + const workspaceId = chatflow.workspaceId + if (!hasAssignedWorkspace(workspaceId)) { + navigate('/unauthorized') + return + } const initialFlow = chatflow.flowData ? JSON.parse(chatflow.flowData) : [] + setLasUpdatedDateTime(chatflow.updatedDate) setNodes(initialFlow.nodes || []) setEdges(initialFlow.edges || []) dispatch({ type: SET_CHATFLOW, chatflow }) @@ -420,7 +434,7 @@ const Canvas = () => { saveChatflowSuccess() window.history.replaceState(state, null, `/${isAgentCanvas ? 
'agentcanvas' : 'canvas'}/${chatflow.id}`) } else if (createNewChatflowApi.error) { - errorFailed(`Failed to save ${canvasTitle}: ${createNewChatflowApi.error.response.data.message}`) + errorFailed(`Failed to retrieve ${canvasTitle}: ${createNewChatflowApi.error.response.data.message}`) } // eslint-disable-next-line react-hooks/exhaustive-deps @@ -430,14 +444,45 @@ const Canvas = () => { useEffect(() => { if (updateChatflowApi.data) { dispatch({ type: SET_CHATFLOW, chatflow: updateChatflowApi.data }) + setLasUpdatedDateTime(updateChatflowApi.data.updatedDate) saveChatflowSuccess() } else if (updateChatflowApi.error) { - errorFailed(`Failed to save ${canvasTitle}: ${updateChatflowApi.error.response.data.message}`) + errorFailed(`Failed to retrieve ${canvasTitle}: ${updateChatflowApi.error.response.data.message}`) } // eslint-disable-next-line react-hooks/exhaustive-deps }, [updateChatflowApi.data, updateChatflowApi.error]) + // check if chatflow has changed before saving + useEffect(() => { + const checkIfHasChanged = async () => { + if (getHasChatflowChangedApi.data?.hasChanged === true) { + const confirmPayload = { + title: `Confirm Change`, + description: `${canvasTitle} ${chatflow.name} has changed since you have opened, overwrite changes?`, + confirmButtonName: 'Confirm', + cancelButtonName: 'Cancel' + } + const isConfirmed = await confirm(confirmPayload) + + if (!isConfirmed) { + return + } + } + const updateBody = { + name: chatflowName, + flowData + } + updateChatflowApi.request(chatflow.id, updateBody) + } + + if (getHasChatflowChangedApi.data) { + checkIfHasChanged() + } + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getHasChatflowChangedApi.data, getHasChatflowChangedApi.error]) + useEffect(() => { setChatflow(canvasDataStore.chatflow) if (canvasDataStore.chatflow) { @@ -554,17 +599,41 @@ const Canvas = () => { fitView deleteKeyCode={canvas.canvasDialogShow ? 
null : ['Delete']} minZoom={0.1} + snapGrid={[25, 25]} + snapToGrid={isSnappingEnabled} className='chatflow-canvas' > - + > + + + + {isBackgroundEnabled && } {isSyncNodesButtonEnabled && ( { const URLpath = document.location.pathname.toString().split('/') const chatflowId = URLpath[URLpath.length - 1] === 'chatbot' ? '' : URLpath[URLpath.length - 1] - const navigate = useNavigate() const theme = useTheme() const [chatflow, setChatflow] = useState(null) const [chatbotTheme, setChatbotTheme] = useState({}) - const [loginDialogOpen, setLoginDialogOpen] = useState(false) - const [loginDialogProps, setLoginDialogProps] = useState({}) const [isLoading, setLoading] = useState(true) const [chatbotOverrideConfig, setChatbotOverrideConfig] = useState({}) const getSpecificChatflowFromPublicApi = useApi(chatflowsApi.getSpecificChatflowFromPublicEndpoint) const getSpecificChatflowApi = useApi(chatflowsApi.getSpecificChatflow) - const onLoginClick = (username, password) => { - localStorage.setItem('username', username) - localStorage.setItem('password', password) - navigate(0) - } - useEffect(() => { getSpecificChatflowFromPublicApi.request(chatflowId) // eslint-disable-next-line react-hooks/exhaustive-deps }, []) - useEffect(() => { - if (getSpecificChatflowFromPublicApi.error) { - if (getSpecificChatflowFromPublicApi.error?.response?.status === 401) { - if (localStorage.getItem('username') && localStorage.getItem('password')) { - getSpecificChatflowApi.request(chatflowId) - } else { - setLoginDialogProps({ - title: 'Login', - confirmButtonName: 'Login' - }) - setLoginDialogOpen(true) - } - } - } - // eslint-disable-next-line react-hooks/exhaustive-deps - }, [getSpecificChatflowFromPublicApi.error]) - - useEffect(() => { - if (getSpecificChatflowApi.error) { - if (getSpecificChatflowApi.error?.response?.status === 401) { - setLoginDialogProps({ - title: 'Login', - confirmButtonName: 'Login' - }) - setLoginDialogOpen(true) - } - } - }, [getSpecificChatflowApi.error]) - 
useEffect(() => { if (getSpecificChatflowFromPublicApi.data || getSpecificChatflowApi.data) { const chatflowData = getSpecificChatflowFromPublicApi.data || getSpecificChatflowApi.data @@ -151,7 +109,6 @@ const ChatbotFull = () => { theme={{ chatWindow: chatbotTheme }} /> )} - ) : null} diff --git a/packages/ui/src/views/chatflows/APICodeDialog.jsx b/packages/ui/src/views/chatflows/APICodeDialog.jsx index a7ae54d9d..26acf8fe2 100644 --- a/packages/ui/src/views/chatflows/APICodeDialog.jsx +++ b/packages/ui/src/views/chatflows/APICodeDialog.jsx @@ -1,6 +1,6 @@ import { createPortal } from 'react-dom' import { useNavigate } from 'react-router-dom' -import { useState, useEffect } from 'react' +import { useState, useEffect, useMemo } from 'react' import { useDispatch, useSelector } from 'react-redux' import PropTypes from 'prop-types' @@ -21,11 +21,13 @@ import { import { CopyBlock, atomOneDark } from 'react-code-blocks' import ExpandMoreIcon from '@mui/icons-material/ExpandMore' import { useTheme } from '@mui/material/styles' +import { useAuth } from '@/hooks/useAuth' // Project import import { Dropdown } from '@/ui-component/dropdown/Dropdown' import ShareChatbot from './ShareChatbot' import EmbedChat from './EmbedChat' +import { Available } from '@/ui-component/rbac/available' // Const import { baseURL } from '@/store/constant' @@ -93,7 +95,6 @@ const APICodeDialog = ({ show, dialogProps, onCancel }) => { const codes = ['Embed', 'Python', 'JavaScript', 'cURL', 'Share Chatbot'] const [value, setValue] = useState(0) - const [keyOptions, setKeyOptions] = useState([]) const [apiKeys, setAPIKeys] = useState([]) const [chatflowApiKeyId, setChatflowApiKeyId] = useState('') const [selectedApiKey, setSelectedApiKey] = useState({}) @@ -108,6 +109,36 @@ const APICodeDialog = ({ show, dialogProps, onCancel }) => { const getIsChatflowStreamingApi = useApi(chatflowsApi.getIsChatflowStreaming) const getConfigApi = useApi(configApi.getConfig) const getAllVariablesApi = 
useApi(variablesApi.getAllVariables) + const isGlobal = useSelector((state) => state.auth.isGlobal) + const { hasPermission } = useAuth() + + // Memoize keyOptions to prevent recreation on hover + const keyOptions = useMemo(() => { + if (!getAllAPIKeysApi.data) return [] + + const options = [ + { + label: 'No Authorization', + name: '' + } + ] + + for (const key of getAllAPIKeysApi.data) { + options.push({ + label: key.keyName, + name: key.id + }) + } + + if (isGlobal || hasPermission('apikeys:create')) { + options.push({ + label: '- Add New Key -', + name: 'addnewkey' + }) + } + + return options + }, [getAllAPIKeysApi.data, isGlobal, hasPermission]) const onCheckBoxChanged = (newVal) => { setCheckbox(newVal) @@ -123,7 +154,8 @@ const APICodeDialog = ({ show, dialogProps, onCancel }) => { return } setChatflowApiKeyId(keyValue) - setSelectedApiKey(apiKeys.find((key) => key.id === keyValue)) + const selectedKey = apiKeys.find((key) => key.id === keyValue) + setSelectedApiKey(selectedKey || {}) const updateBody = { apikeyid: keyValue } @@ -171,26 +203,7 @@ const APICodeDialog = ({ show, dialogProps, onCancel }) => { result[node].nodeIds.sort() } setNodeConfig(result) - - if (!overrideConfigStatus) { - setNodeOverrides(newNodeOverrides) - } else { - const updatedNodeOverrides = { ...nodeOverrides } - - Object.keys(updatedNodeOverrides).forEach((node) => { - if (!seenNodes.has(node)) { - delete updatedNodeOverrides[node] - } - }) - - seenNodes.forEach((node) => { - if (!updatedNodeOverrides[node]) { - updatedNodeOverrides[node] = newNodeOverrides[node] - } - }) - - setNodeOverrides(updatedNodeOverrides) - } + setNodeOverrides(newNodeOverrides) } const groupByVariableLabel = (variables) => { @@ -597,26 +610,63 @@ query({ } const getMultiConfigCodeWithFormData = (codeLang) => { - if (codeLang === 'Python') { - return `# Specify multiple values for a config parameter by specifying the node id + if (dialogProps.isAgentflowV2) { + if (codeLang === 'Python') { + return `# 
Specify multiple values for a config parameter by specifying the node id +body_data = { + "agentModelConfig": { + "agentAgentflow_0": { + "openAIApiKey": "sk-my-openai-1st-key" + }, + "agentAgentflow_1": { + "openAIApiKey": "sk-my-openai-2nd-key" + } + } +}` + } else if (codeLang === 'JavaScript') { + return `// Specify multiple values for a config parameter by specifying the node id +formData.append("agentModelConfig[agentAgentflow_0][openAIApiKey]", "sk-my-openai-1st-key") +formData.append("agentModelConfig[agentAgentflow_1][openAIApiKey]", "sk-my-openai-2nd-key")` + } else if (codeLang === 'cURL') { + return `-F "agentModelConfig[agentAgentflow_0][openAIApiKey]=sk-my-openai-1st-key" \\ +-F "agentModelConfig[agentAgentflow_1][openAIApiKey]=sk-my-openai-2nd-key" \\` + } + } else { + if (codeLang === 'Python') { + return `# Specify multiple values for a config parameter by specifying the node id body_data = { "openAIApiKey": { "chatOpenAI_0": "sk-my-openai-1st-key", "openAIEmbeddings_0": "sk-my-openai-2nd-key" } }` - } else if (codeLang === 'JavaScript') { - return `// Specify multiple values for a config parameter by specifying the node id + } else if (codeLang === 'JavaScript') { + return `// Specify multiple values for a config parameter by specifying the node id formData.append("openAIApiKey[chatOpenAI_0]", "sk-my-openai-1st-key") formData.append("openAIApiKey[openAIEmbeddings_0]", "sk-my-openai-2nd-key")` - } else if (codeLang === 'cURL') { - return `-F "openAIApiKey[chatOpenAI_0]=sk-my-openai-1st-key" \\ + } else if (codeLang === 'cURL') { + return `-F "openAIApiKey[chatOpenAI_0]=sk-my-openai-1st-key" \\ -F "openAIApiKey[openAIEmbeddings_0]=sk-my-openai-2nd-key" \\` + } } } const getMultiConfigCode = () => { - return `{ + if (dialogProps.isAgentflowV2) { + return `{ + "overrideConfig": { + "agentModelConfig": { + "agentAgentflow_0": { + "openAIApiKey": "sk-my-openai-1st-key" + }, + "agentAgentflow_1": { + "openAIApiKey": "sk-my-openai-2nd-key" + } + } + } +}` 
+ } else { + return `{ "overrideConfig": { "openAIApiKey": { "chatOpenAI_0": "sk-my-openai-1st-key", @@ -624,27 +674,11 @@ formData.append("openAIApiKey[openAIEmbeddings_0]", "sk-my-openai-2nd-key")` } } }` + } } useEffect(() => { if (getAllAPIKeysApi.data) { - const options = [ - { - label: 'No Authorization', - name: '' - } - ] - for (const key of getAllAPIKeysApi.data) { - options.push({ - label: key.keyName, - name: key.id - }) - } - options.push({ - label: '- Add New Key -', - name: 'addnewkey' - }) - setKeyOptions(options) setAPIKeys(getAllAPIKeysApi.data) if (dialogProps.chatflowApiKeyId) { @@ -693,13 +727,15 @@ formData.append("openAIApiKey[openAIEmbeddings_0]", "sk-my-openai-2nd-key")`
    - onApiKeySelected(newValue)} - value={dialogProps.chatflowApiKeyId ?? chatflowApiKeyId ?? 'Choose an API key'} - /> + + onApiKeySelected(newValue)} + value={dialogProps.chatflowApiKeyId ?? chatflowApiKeyId ?? 'Choose an API key'} + /> +
    @@ -756,7 +792,7 @@ formData.append("openAIApiKey[openAIEmbeddings_0]", "sk-my-openai-2nd-key")` here {' '} @@ -821,7 +857,9 @@ formData.append("openAIApiKey[openAIEmbeddings_0]", "sk-my-openai-2nd-key")` rows={nodeOverrides[nodeLabel]} columns={ nodeOverrides[nodeLabel].length > 0 - ? Object.keys(nodeOverrides[nodeLabel][0]) + ? Object.keys(nodeOverrides[nodeLabel][0]).filter( + (key) => key !== 'schema' + ) : [] } /> diff --git a/packages/ui/src/views/chatflows/ShareChatbot.jsx b/packages/ui/src/views/chatflows/ShareChatbot.jsx index 22b3ec1aa..f0ca8e9b8 100644 --- a/packages/ui/src/views/chatflows/ShareChatbot.jsx +++ b/packages/ui/src/views/chatflows/ShareChatbot.jsx @@ -8,8 +8,9 @@ import { Card, Box, Typography, Button, Switch, OutlinedInput, Popover, Stack, I import { useTheme } from '@mui/material/styles' // Project import -import { StyledButton } from '@/ui-component/button/StyledButton' import { TooltipWithParser } from '@/ui-component/tooltip/TooltipWithParser' +import { Available } from '@/ui-component/rbac/available' +import { StyledPermissionButton } from '@/ui-component/button/RBACButtons' // Icons import { IconX, IconCopy, IconArrowUpRightCircle } from '@tabler/icons-react' @@ -444,20 +445,22 @@ const ShareChatbot = ({ isSessionMemory, isAgentCanvas }) => {
    -
    - { - setChatflowIsPublic(event.target.checked) - onSwitchChange(event.target.checked) - }} - /> - Make Public - -
    + +
    + { + setChatflowIsPublic(event.target.checked) + onSwitchChange(event.target.checked) + }} + /> + Make Public + +
    +
    @@ -533,7 +536,8 @@ const ShareChatbot = ({ isSessionMemory, isAgentCanvas }) => { {colorField(textInputSendButtonColor, 'textInputSendButtonColor', 'TextIntput Send Button Color')} - { onClick={() => onSave()} > Save Changes - + { const theme = useTheme() const [isLoading, setLoading] = useState(true) - const [error, setError] = useState(null) const [images, setImages] = useState({}) const [search, setSearch] = useState('') - const [loginDialogOpen, setLoginDialogOpen] = useState(false) - const [loginDialogProps, setLoginDialogProps] = useState({}) + const { error, setError } = useError() const getAllChatflowsApi = useApi(chatflowsApi.getAllChatflows) const [view, setView] = useState(localStorage.getItem('flowDisplayStyle') || 'card') + /* Table Pagination */ + const [currentPage, setCurrentPage] = useState(1) + const [pageLimit, setPageLimit] = useState(DEFAULT_ITEMS_PER_PAGE) + const [total, setTotal] = useState(0) + + const onChange = (page, pageLimit) => { + setCurrentPage(page) + setPageLimit(pageLimit) + applyFilters(page, pageLimit) + } + + const applyFilters = (page, limit) => { + const params = { + page: page || currentPage, + limit: limit || pageLimit + } + getAllChatflowsApi.request(params) + } + const handleChange = (event, nextView) => { if (nextView === null) return localStorage.setItem('flowDisplayStyle', nextView) @@ -57,18 +75,12 @@ const Chatflows = () => { function filterFlows(data) { return ( - data.name.toLowerCase().indexOf(search.toLowerCase()) > -1 || + data?.name.toLowerCase().indexOf(search.toLowerCase()) > -1 || (data.category && data.category.toLowerCase().indexOf(search.toLowerCase()) > -1) || - data.id.toLowerCase().indexOf(search.toLowerCase()) > -1 + data?.id.toLowerCase().indexOf(search.toLowerCase()) > -1 ) } - const onLoginClick = (username, password) => { - localStorage.setItem('username', username) - localStorage.setItem('password', password) - navigate(0) - } - const addNew = () => { navigate('/canvas') } @@ -78,25 +90,10 
@@ const Chatflows = () => { } useEffect(() => { - getAllChatflowsApi.request() - + applyFilters(currentPage, pageLimit) // eslint-disable-next-line react-hooks/exhaustive-deps }, []) - useEffect(() => { - if (getAllChatflowsApi.error) { - if (getAllChatflowsApi.error?.response?.status === 401) { - setLoginDialogProps({ - title: 'Login', - confirmButtonName: 'Login' - }) - setLoginDialogOpen(true) - } else { - setError(getAllChatflowsApi.error) - } - } - }, [getAllChatflowsApi.error]) - useEffect(() => { setLoading(getAllChatflowsApi.loading) }, [getAllChatflowsApi.loading]) @@ -104,7 +101,9 @@ const Chatflows = () => { useEffect(() => { if (getAllChatflowsApi.data) { try { - const chatflows = getAllChatflowsApi.data + const chatflows = getAllChatflowsApi.data?.data + const total = getAllChatflowsApi.data?.total + setTotal(total) const images = {} for (let i = 0; i < chatflows.length; i += 1) { const flowDataStr = chatflows[i].flowData @@ -112,9 +111,13 @@ const Chatflows = () => { const nodes = flowData.nodes || [] images[chatflows[i].id] = [] for (let j = 0; j < nodes.length; j += 1) { + if (nodes[j].data.name === 'stickyNote' || nodes[j].data.name === 'stickyNoteAgentflow') continue const imageSrc = `${baseURL}/api/v1/node-icon/${nodes[j].data.name}` - if (!images[chatflows[i].id].includes(imageSrc)) { - images[chatflows[i].id].push(imageSrc) + if (!images[chatflows[i].id].some((img) => img.imageSrc === imageSrc)) { + images[chatflows[i].id].push({ + imageSrc, + label: nodes[j].data.label + }) } } } @@ -142,6 +145,7 @@ const Chatflows = () => { sx={{ borderRadius: 2, maxHeight: 40 }} value={view} color='primary' + disabled={total === 0} exclusive onChange={handleChange} > @@ -170,37 +174,49 @@ const Chatflows = () => { - } sx={{ borderRadius: 2, height: 40 }}> + } + sx={{ borderRadius: 2, height: 40 }} + > Add New - + - {!view || view === 'card' ? ( + + {isLoading && ( + + + + + + )} + {!isLoading && total > 0 && ( <> - {isLoading && !getAllChatflowsApi.data ? 
( + {!view || view === 'card' ? ( - - - - - ) : ( - - {getAllChatflowsApi.data?.filter(filterFlows).map((data, index) => ( + {getAllChatflowsApi.data?.data?.filter(filterFlows).map((data, index) => ( goToCanvas(data)} data={data} images={images[data.id]} /> ))} + ) : ( + )} + {/* Pagination and Page Size Controls */} + - ) : ( - )} - {!isLoading && (!getAllChatflowsApi.data || getAllChatflowsApi.data.length === 0) && ( + {!isLoading && (!getAllChatflowsApi.data?.data || getAllChatflowsApi.data?.data.length === 0) && ( { )} )} - - ) diff --git a/packages/ui/src/views/chatmessage/ChatMessage.jsx b/packages/ui/src/views/chatmessage/ChatMessage.jsx index d8c4a66ea..08ca5f27c 100644 --- a/packages/ui/src/views/chatmessage/ChatMessage.jsx +++ b/packages/ui/src/views/chatmessage/ChatMessage.jsx @@ -38,7 +38,8 @@ import { IconSquareFilled, IconCheck, IconPaperclip, - IconSparkles + IconSparkles, + IconVolume } from '@tabler/icons-react' import robotPNG from '@/assets/images/robot.png' import userPNG from '@/assets/images/account.png' @@ -49,6 +50,7 @@ import audioUploadSVG from '@/assets/images/wave-sound.jpg' // project import import NodeInputHandler from '@/views/canvas/NodeInputHandler' import { MemoizedReactMarkdown } from '@/ui-component/markdown/MemoizedReactMarkdown' +import { SafeHTML } from '@/ui-component/safe/SafeHTML' import SourceDocDialog from '@/ui-component/dialog/SourceDocDialog' import ChatFeedbackContentDialog from '@/ui-component/dialog/ChatFeedbackContentDialog' import StarterPromptsCard from '@/ui-component/cards/StarterPromptsCard' @@ -71,6 +73,7 @@ import attachmentsApi from '@/api/attachments' import chatmessagefeedbackApi from '@/api/chatmessagefeedback' import leadsApi from '@/api/lead' import executionsApi from '@/api/executions' +import ttsApi from '@/api/tts' // Hooks import useApi from '@/hooks/useApi' @@ -250,6 +253,27 @@ const ChatMessage = ({ open, chatflowid, isAgentCanvas, isDialog, previews, setP const [isConfigLoading, 
setIsConfigLoading] = useState(true) + // TTS state + const [isTTSLoading, setIsTTSLoading] = useState({}) + const [isTTSPlaying, setIsTTSPlaying] = useState({}) + const [ttsAudio, setTtsAudio] = useState({}) + const [isTTSEnabled, setIsTTSEnabled] = useState(false) + + // TTS streaming state + const [ttsStreamingState, setTtsStreamingState] = useState({ + mediaSource: null, + sourceBuffer: null, + audio: null, + chunkQueue: [], + isBuffering: false, + audioFormat: null, + abortController: null + }) + + // Ref to prevent auto-scroll during TTS actions (using ref to avoid re-renders) + const isTTSActionRef = useRef(false) + const ttsTimeoutRef = useRef(null) + const isFileAllowedForUpload = (file) => { const constraints = getAllowChatFlowUploads.data /** @@ -462,7 +486,12 @@ const ChatMessage = ({ open, chatflowid, isAgentCanvas, isDialog, previews, setP const handleAbort = async () => { setIsMessageStopping(true) try { + // Stop all TTS streams first + await handleTTSAbortAll() + stopAllTTS() + await chatmessageApi.abortMessage(chatflowid, chatId) + setIsMessageStopping(false) } catch (error) { setIsMessageStopping(false) enqueueSnackbar({ @@ -535,6 +564,22 @@ const ChatMessage = ({ open, chatflowid, isAgentCanvas, isDialog, previews, setP } } + // Helper function to manage TTS action flag + const setTTSAction = (isActive) => { + isTTSActionRef.current = isActive + if (ttsTimeoutRef.current) { + clearTimeout(ttsTimeoutRef.current) + ttsTimeoutRef.current = null + } + if (isActive) { + // Reset the flag after a longer delay to ensure all state changes are complete + ttsTimeoutRef.current = setTimeout(() => { + isTTSActionRef.current = false + ttsTimeoutRef.current = null + }, 300) + } + } + const onChange = useCallback((e) => setUserInput(e.target.value), [setUserInput]) const updateLastMessage = (text) => { @@ -642,11 +687,57 @@ const ChatMessage = ({ open, chatflowid, isAgentCanvas, isDialog, previews, setP setMessages((prevMessages) => { let allMessages = 
[...cloneDeep(prevMessages)] if (allMessages[allMessages.length - 1].type === 'userMessage') return allMessages + + // When usedTools are received, check if there are matching calledTools to replace + const lastMessage = allMessages[allMessages.length - 1] + if (lastMessage.calledTools && lastMessage.calledTools.length > 0) { + // Replace calledTools with usedTools for matching tool names + const updatedCalledTools = lastMessage.calledTools.map((calledTool) => { + const matchingUsedTool = usedTools.find((usedTool) => usedTool.tool === calledTool.tool) + return matchingUsedTool || calledTool + }) + + // Remove calledTools that have been replaced by usedTools + const remainingCalledTools = updatedCalledTools.filter( + (calledTool) => !usedTools.some((usedTool) => usedTool.tool === calledTool.tool) + ) + + allMessages[allMessages.length - 1].calledTools = remainingCalledTools.length > 0 ? remainingCalledTools : undefined + } + allMessages[allMessages.length - 1].usedTools = usedTools return allMessages }) } + const updateLastMessageCalledTools = (calledTools) => { + setMessages((prevMessages) => { + let allMessages = [...cloneDeep(prevMessages)] + if (allMessages[allMessages.length - 1].type === 'userMessage') return allMessages + allMessages[allMessages.length - 1].calledTools = calledTools + return allMessages + }) + } + + const cleanupCalledTools = () => { + setMessages((prevMessages) => { + let allMessages = [...cloneDeep(prevMessages)] + if (allMessages[allMessages.length - 1].type === 'userMessage') return allMessages + + // Remove any remaining calledTools when the stream ends + const lastMessage = allMessages[allMessages.length - 1] + if (lastMessage && lastMessage.calledTools && lastMessage.calledTools.length > 0) { + // Only remove if there are still calledTools and no matching usedTools + const hasUsedTools = lastMessage.usedTools && lastMessage.usedTools.length > 0 + if (!hasUsedTools) { + allMessages[allMessages.length - 1].calledTools = undefined + } + } 
+ + return allMessages + }) + } + const updateLastMessageFileAnnotations = (fileAnnotations) => { setMessages((prevMessages) => { let allMessages = [...cloneDeep(prevMessages)] @@ -665,6 +756,7 @@ const ChatMessage = ({ open, chatflowid, isAgentCanvas, isDialog, previews, setP if (lastAgentReasoning && lastAgentReasoning.length > 0) { allMessages[allMessages.length - 1].agentReasoning = lastAgentReasoning.filter((reasoning) => !reasoning.nextAgent) } + allMessages[allMessages.length - 1].calledTools = undefined return allMessages }) setTimeout(() => { @@ -948,6 +1040,7 @@ const ChatMessage = ({ open, chatflowid, isAgentCanvas, isDialog, previews, setP setLoading(false) setUserInput('') setUploadedFiles([]) + setTimeout(() => { inputRef.current?.focus() scrollToBottom() @@ -963,8 +1056,6 @@ const ChatMessage = ({ open, chatflowid, isAgentCanvas, isDialog, previews, setP const fetchResponseFromEventStream = async (chatflowid, params) => { const chatId = params.chatId const input = params.question - const username = localStorage.getItem('username') - const password = localStorage.getItem('password') params.streaming = true await fetchEventSource(`${baseURL}/api/v1/internal-prediction/${chatflowid}`, { openWhenHidden: true, @@ -972,7 +1063,6 @@ const ChatMessage = ({ open, chatflowid, isAgentCanvas, isDialog, previews, setP body: JSON.stringify(params), headers: { 'Content-Type': 'application/json', - Authorization: username && password ? 
`Basic ${btoa(`${username}:${password}`)}` : undefined, 'x-request-from': 'internal' }, async onopen(response) { @@ -995,6 +1085,9 @@ const ChatMessage = ({ open, chatflowid, isAgentCanvas, isDialog, previews, setP case 'usedTools': updateLastMessageUsedTools(payload.data) break + case 'calledTools': + updateLastMessageCalledTools(payload.data) + break case 'fileAnnotations': updateLastMessageFileAnnotations(payload.data) break @@ -1029,13 +1122,27 @@ const ChatMessage = ({ open, chatflowid, isAgentCanvas, isDialog, previews, setP abortMessage(payload.data) closeResponse() break + case 'tts_start': + handleTTSStart(payload.data) + break + case 'tts_data': + handleTTSDataChunk(payload.data.audioChunk) + break + case 'tts_end': + handleTTSEnd() + break + case 'tts_abort': + handleTTSAbort(payload.data) + break case 'end': + cleanupCalledTools() setLocalStorageChatflow(chatflowid, chatId) closeResponse() break } }, async onclose() { + cleanupCalledTools() closeResponse() }, async onerror(err) { @@ -1047,6 +1154,7 @@ const ChatMessage = ({ open, chatflowid, isAgentCanvas, isDialog, previews, setP } const closeResponse = () => { + cleanupCalledTools() setLoading(false) setUserInput('') setUploadedFiles([]) @@ -1147,6 +1255,7 @@ const ChatMessage = ({ open, chatflowid, isAgentCanvas, isDialog, previews, setP } if (message.sourceDocuments) obj.sourceDocuments = message.sourceDocuments if (message.usedTools) obj.usedTools = message.usedTools + if (message.calledTools) obj.calledTools = message.calledTools if (message.fileAnnotations) obj.fileAnnotations = message.fileAnnotations if (message.agentReasoning) obj.agentReasoning = message.agentReasoning if (message.action) obj.action = message.action @@ -1295,6 +1404,30 @@ const ChatMessage = ({ open, chatflowid, isAgentCanvas, isDialog, previews, setP } } } + + // Check if TTS is configured + if (getChatflowConfig.data && getChatflowConfig.data.textToSpeech) { + try { + const ttsConfig = + typeof 
getChatflowConfig.data.textToSpeech === 'string' + ? JSON.parse(getChatflowConfig.data.textToSpeech) + : getChatflowConfig.data.textToSpeech + + let isEnabled = false + if (ttsConfig) { + Object.keys(ttsConfig).forEach((provider) => { + if (provider !== 'none' && ttsConfig?.[provider]?.status) { + isEnabled = true + } + }) + } + setIsTTSEnabled(isEnabled) + } catch (error) { + setIsTTSEnabled(false) + } + } else { + setIsTTSEnabled(false) + } // eslint-disable-next-line react-hooks/exhaustive-deps }, [getChatflowConfig.data]) @@ -1315,9 +1448,11 @@ const ChatMessage = ({ open, chatflowid, isAgentCanvas, isDialog, previews, setP } }, [isChatFlowAvailableForRAGFileUploads, fullFileUpload]) - // Auto scroll chat to bottom + // Auto scroll chat to bottom (but not during TTS actions) useEffect(() => { - scrollToBottom() + if (!isTTSActionRef.current) { + scrollToBottom() + } }, [messages]) useEffect(() => { @@ -1499,9 +1634,451 @@ const ChatMessage = ({ open, chatflowid, isAgentCanvas, isDialog, previews, setP return allMessages }) } + setIsLeadSaving(false) } + const cleanupTTSForMessage = (messageId) => { + if (ttsAudio[messageId]) { + ttsAudio[messageId].pause() + ttsAudio[messageId].currentTime = 0 + setTtsAudio((prev) => { + const newState = { ...prev } + delete newState[messageId] + return newState + }) + } + + if (ttsStreamingState.audio) { + ttsStreamingState.audio.pause() + cleanupTTSStreaming() + } + + setIsTTSPlaying((prev) => { + const newState = { ...prev } + delete newState[messageId] + return newState + }) + + setIsTTSLoading((prev) => { + const newState = { ...prev } + delete newState[messageId] + return newState + }) + } + + const handleTTSStop = async (messageId) => { + setTTSAction(true) + await ttsApi.abortTTS({ chatflowId: chatflowid, chatId, chatMessageId: messageId }) + cleanupTTSForMessage(messageId) + setIsMessageStopping(false) + } + + const stopAllTTS = () => { + Object.keys(ttsAudio).forEach((messageId) => { + if (ttsAudio[messageId]) { + 
ttsAudio[messageId].pause() + ttsAudio[messageId].currentTime = 0 + } + }) + setTtsAudio({}) + + if (ttsStreamingState.abortController) { + ttsStreamingState.abortController.abort() + } + + if (ttsStreamingState.audio) { + ttsStreamingState.audio.pause() + cleanupTTSStreaming() + } + + setIsTTSPlaying({}) + setIsTTSLoading({}) + } + + const handleTTSClick = async (messageId, messageText) => { + if (isTTSLoading[messageId]) return + + if (isTTSPlaying[messageId] || ttsAudio[messageId]) { + handleTTSStop(messageId) + return + } + + setTTSAction(true) + + // abort all ongoing streams and clear audio sources + await handleTTSAbortAll() + stopAllTTS() + + handleTTSStart({ chatMessageId: messageId, format: 'mp3' }) + try { + const abortController = new AbortController() + setTtsStreamingState((prev) => ({ ...prev, abortController })) + + const response = await fetch('/api/v1/text-to-speech/generate', { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + 'x-request-from': 'internal' + }, + credentials: 'include', + signal: abortController.signal, + body: JSON.stringify({ + chatflowId: chatflowid, + chatId: chatId, + chatMessageId: messageId, + text: messageText + }) + }) + + if (!response.ok) { + throw new Error(`TTS request failed: ${response.status}`) + } + + const reader = response.body.getReader() + const decoder = new TextDecoder() + let buffer = '' + + let done = false + while (!done) { + if (abortController.signal.aborted) { + break + } + + const result = await reader.read() + done = result.done + if (done) { + break + } + const value = result.value + const chunk = decoder.decode(value, { stream: true }) + buffer += chunk + + const lines = buffer.split('\n\n') + buffer = lines.pop() || '' + + for (const eventBlock of lines) { + if (eventBlock.trim()) { + const event = parseSSEEvent(eventBlock) + if (event) { + switch (event.event) { + case 'tts_start': + break + case 'tts_data': + if (!abortController.signal.aborted) { + 
handleTTSDataChunk(event.data.audioChunk) + } + break + case 'tts_end': + if (!abortController.signal.aborted) { + handleTTSEnd() + } + break + } + } + } + } + } + } catch (error) { + if (error.name === 'AbortError') { + console.error('TTS request was aborted') + } else { + console.error('Error with TTS:', error) + enqueueSnackbar({ + message: `TTS failed: ${error.message}`, + options: { variant: 'error' } + }) + } + } finally { + setIsTTSLoading((prev) => { + const newState = { ...prev } + delete newState[messageId] + return newState + }) + } + } + + const parseSSEEvent = (eventBlock) => { + const lines = eventBlock.split('\n') + const event = {} + + for (const line of lines) { + if (line.startsWith('event:')) { + event.event = line.substring(6).trim() + } else if (line.startsWith('data:')) { + const dataStr = line.substring(5).trim() + try { + const parsed = JSON.parse(dataStr) + if (parsed.data) { + event.data = parsed.data + } + } catch (e) { + console.error('Error parsing SSE data:', e, 'Raw data:', dataStr) + } + } + } + + return event.event ? event : null + } + + const initializeTTSStreaming = (data) => { + try { + const mediaSource = new MediaSource() + const audio = new Audio() + audio.src = URL.createObjectURL(mediaSource) + + mediaSource.addEventListener('sourceopen', () => { + try { + const mimeType = data.format === 'mp3' ? 
'audio/mpeg' : 'audio/mpeg' + const sourceBuffer = mediaSource.addSourceBuffer(mimeType) + + setTtsStreamingState((prevState) => ({ + ...prevState, + mediaSource, + sourceBuffer, + audio + })) + + audio.play().catch((playError) => { + console.error('Error starting audio playback:', playError) + }) + } catch (error) { + console.error('Error setting up source buffer:', error) + console.error('MediaSource readyState:', mediaSource.readyState) + console.error('Requested MIME type:', mimeType) + } + }) + + audio.addEventListener('playing', () => { + setIsTTSLoading((prevState) => { + const newState = { ...prevState } + delete newState[data.chatMessageId] + return newState + }) + setIsTTSPlaying((prevState) => ({ + ...prevState, + [data.chatMessageId]: true + })) + }) + + audio.addEventListener('ended', () => { + setIsTTSPlaying((prevState) => { + const newState = { ...prevState } + delete newState[data.chatMessageId] + return newState + }) + cleanupTTSStreaming() + }) + } catch (error) { + console.error('Error initializing TTS streaming:', error) + } + } + + const cleanupTTSStreaming = () => { + setTtsStreamingState((prevState) => { + if (prevState.abortController) { + prevState.abortController.abort() + } + + if (prevState.audio) { + prevState.audio.pause() + prevState.audio.removeAttribute('src') + if (prevState.audio.src) { + URL.revokeObjectURL(prevState.audio.src) + } + } + + if (prevState.mediaSource) { + if (prevState.mediaSource.readyState === 'open') { + try { + prevState.mediaSource.endOfStream() + } catch (e) { + // Ignore errors during cleanup + } + } + prevState.mediaSource.removeEventListener('sourceopen', () => {}) + } + + return { + mediaSource: null, + sourceBuffer: null, + audio: null, + chunkQueue: [], + isBuffering: false, + audioFormat: null, + abortController: null + } + }) + } + + const processChunkQueue = () => { + setTtsStreamingState((prevState) => { + if (!prevState.sourceBuffer || prevState.sourceBuffer.updating || prevState.chunkQueue.length 
=== 0) { + return prevState + } + + const chunk = prevState.chunkQueue.shift() + + try { + prevState.sourceBuffer.appendBuffer(chunk) + return { + ...prevState, + chunkQueue: [...prevState.chunkQueue], + isBuffering: true + } + } catch (error) { + console.error('Error appending chunk to buffer:', error) + return prevState + } + }) + } + + const handleTTSStart = (data) => { + setTTSAction(true) + + // Stop all existing TTS audio before starting new stream + stopAllTTS() + + setIsTTSLoading((prevState) => ({ + ...prevState, + [data.chatMessageId]: true + })) + setMessages((prevMessages) => { + const allMessages = [...cloneDeep(prevMessages)] + const lastMessage = allMessages[allMessages.length - 1] + if (lastMessage.type === 'userMessage') return allMessages + if (lastMessage.id) return allMessages + allMessages[allMessages.length - 1].id = data.chatMessageId + return allMessages + }) + setTtsStreamingState({ + mediaSource: null, + sourceBuffer: null, + audio: null, + chunkQueue: [], + isBuffering: false, + audioFormat: data.format, + abortController: null + }) + + setTimeout(() => initializeTTSStreaming(data), 0) + } + + const handleTTSDataChunk = (base64Data) => { + try { + const audioBuffer = Uint8Array.from(atob(base64Data), (c) => c.charCodeAt(0)) + + setTtsStreamingState((prevState) => { + const newState = { + ...prevState, + chunkQueue: [...prevState.chunkQueue, audioBuffer] + } + + if (prevState.sourceBuffer && !prevState.sourceBuffer.updating) { + setTimeout(() => processChunkQueue(), 0) + } + + return newState + }) + } catch (error) { + console.error('Error handling TTS data chunk:', error) + } + } + + const handleTTSEnd = () => { + setTtsStreamingState((prevState) => { + if (prevState.mediaSource && prevState.mediaSource.readyState === 'open') { + try { + if (prevState.sourceBuffer && prevState.chunkQueue.length > 0 && !prevState.sourceBuffer.updating) { + const remainingChunks = [...prevState.chunkQueue] + remainingChunks.forEach((chunk, index) => { + 
setTimeout(() => { + if (prevState.sourceBuffer && !prevState.sourceBuffer.updating) { + try { + prevState.sourceBuffer.appendBuffer(chunk) + if (index === remainingChunks.length - 1) { + setTimeout(() => { + if (prevState.mediaSource && prevState.mediaSource.readyState === 'open') { + prevState.mediaSource.endOfStream() + } + }, 100) + } + } catch (error) { + console.error('Error appending remaining chunk:', error) + } + } + }, index * 50) + }) + return { + ...prevState, + chunkQueue: [] + } + } + + if (prevState.sourceBuffer && !prevState.sourceBuffer.updating) { + prevState.mediaSource.endOfStream() + } else if (prevState.sourceBuffer) { + prevState.sourceBuffer.addEventListener( + 'updateend', + () => { + if (prevState.mediaSource && prevState.mediaSource.readyState === 'open') { + prevState.mediaSource.endOfStream() + } + }, + { once: true } + ) + } + } catch (error) { + console.error('Error ending TTS stream:', error) + } + } + return prevState + }) + } + + const handleTTSAbort = (data) => { + const messageId = data.chatMessageId + cleanupTTSForMessage(messageId) + } + + const handleTTSAbortAll = async () => { + const activeTTSMessages = Object.keys(isTTSLoading).concat(Object.keys(isTTSPlaying)) + for (const messageId of activeTTSMessages) { + await ttsApi.abortTTS({ chatflowId: chatflowid, chatId, chatMessageId: messageId }) + } + } + + useEffect(() => { + if (ttsStreamingState.sourceBuffer) { + const sourceBuffer = ttsStreamingState.sourceBuffer + + const handleUpdateEnd = () => { + setTtsStreamingState((prevState) => ({ + ...prevState, + isBuffering: false + })) + setTimeout(() => processChunkQueue(), 0) + } + + sourceBuffer.addEventListener('updateend', handleUpdateEnd) + + return () => { + sourceBuffer.removeEventListener('updateend', handleUpdateEnd) + } + } + }, [ttsStreamingState.sourceBuffer]) + + useEffect(() => { + return () => { + cleanupTTSStreaming() + // Cleanup TTS timeout on unmount + if (ttsTimeoutRef.current) { + 
clearTimeout(ttsTimeoutRef.current) + ttsTimeoutRef.current = null + } + } + }, []) + const getInputDisabled = () => { return ( loading || @@ -1662,7 +2239,7 @@ const ChatMessage = ({ open, chatflowid, isAgentCanvas, isDialog, previews, setP } else if (item.type === 'html') { return (
    -
    +
    ) } else { @@ -1885,6 +2462,7 @@ const ChatMessage = ({ open, chatflowid, isAgentCanvas, isDialog, previews, setP removeDuplicateURL={removeDuplicateURL} isValidURL={isValidURL} onURLClick={onURLClick} + getLabel={getLabel} /> ))}
    @@ -1899,6 +2477,42 @@ const ChatMessage = ({ open, chatflowid, isAgentCanvas, isDialog, previews, setP sessionId={chatId} /> )} + {message.calledTools && ( +
    + {message.calledTools.map((tool, index) => { + return tool ? ( + } + onClick={() => onSourceDialogClick(tool, 'Called Tools')} + /> + ) : null + })} +
    + )} {message.usedTools && (
    )} - {message.type === 'apiMessage' && message.id && chatFeedbackStatus ? ( + {message.type === 'apiMessage' && message.id ? ( <> - copyMessageToClipboard(message.message)} /> - {!message.feedback || - message.feedback.rating === '' || - message.feedback.rating === 'THUMBS_UP' ? ( - onThumbsUpClick(message.id)} - /> - ) : null} - {!message.feedback || - message.feedback.rating === '' || - message.feedback.rating === 'THUMBS_DOWN' ? ( - onThumbsDownClick(message.id)} - /> - ) : null} + {isTTSEnabled && ( + + isTTSPlaying[message.id] + ? handleTTSStop(message.id) + : handleTTSClick(message.id, message.message) + } + disabled={isTTSLoading[message.id]} + sx={{ + backgroundColor: ttsAudio[message.id] ? 'primary.main' : 'transparent', + color: ttsAudio[message.id] ? 'white' : 'inherit', + '&:hover': { + backgroundColor: ttsAudio[message.id] ? 'primary.dark' : 'action.hover' + } + }} + > + {isTTSLoading[message.id] ? ( + + ) : isTTSPlaying[message.id] ? ( + + ) : ( + + )} + + )} + {chatFeedbackStatus && ( + <> + copyMessageToClipboard(message.message)} + /> + {!message.feedback || + message.feedback.rating === '' || + message.feedback.rating === 'THUMBS_UP' ? ( + onThumbsUpClick(message.id)} + /> + ) : null} + {!message.feedback || + message.feedback.rating === '' || + message.feedback.rating === 'THUMBS_DOWN' ? 
( + onThumbsDownClick(message.id)} + /> + ) : null} + + )} ) : null} diff --git a/packages/ui/src/views/credentials/AddEditCredentialDialog.jsx b/packages/ui/src/views/credentials/AddEditCredentialDialog.jsx index 13cddd162..fecdb30a7 100644 --- a/packages/ui/src/views/credentials/AddEditCredentialDialog.jsx +++ b/packages/ui/src/views/credentials/AddEditCredentialDialog.jsx @@ -14,10 +14,11 @@ import ConfirmDialog from '@/ui-component/dialog/ConfirmDialog' import CredentialInputHandler from './CredentialInputHandler' // Icons -import { IconX } from '@tabler/icons-react' +import { IconHandStop, IconX } from '@tabler/icons-react' // API import credentialsApi from '@/api/credentials' +import oauth2Api from '@/api/oauth2' // Hooks import useApi from '@/hooks/useApi' @@ -50,17 +51,22 @@ const AddEditCredentialDialog = ({ show, dialogProps, onCancel, onConfirm, setEr const [name, setName] = useState('') const [credentialData, setCredentialData] = useState({}) const [componentCredential, setComponentCredential] = useState({}) + const [shared, setShared] = useState(false) useEffect(() => { if (getSpecificCredentialApi.data) { - setCredential(getSpecificCredentialApi.data) - if (getSpecificCredentialApi.data.name) { - setName(getSpecificCredentialApi.data.name) + const shared = getSpecificCredentialApi.data.shared + setShared(shared) + if (!shared) { + setCredential(getSpecificCredentialApi.data) + if (getSpecificCredentialApi.data.name) { + setName(getSpecificCredentialApi.data.name) + } + if (getSpecificCredentialApi.data.plainDataObj) { + setCredentialData(getSpecificCredentialApi.data.plainDataObj) + } + getSpecificComponentCredentialApi.request(getSpecificCredentialApi.data.credentialName) } - if (getSpecificCredentialApi.data.plainDataObj) { - setCredentialData(getSpecificCredentialApi.data.plainDataObj) - } - getSpecificComponentCredentialApi.request(getSpecificCredentialApi.data.credentialName) } // eslint-disable-next-line react-hooks/exhaustive-deps @@ -207,6 
+213,149 @@ const AddEditCredentialDialog = ({ show, dialogProps, onCancel, onConfirm, setEr } } + const setOAuth2 = async () => { + try { + let credentialId = null + + // First save or add the credential + if (dialogProps.type === 'ADD') { + // Add new credential first + const obj = { + name, + credentialName: componentCredential.name, + plainDataObj: credentialData + } + const createResp = await credentialsApi.createCredential(obj) + if (createResp.data) { + credentialId = createResp.data.id + } + } else { + // Save existing credential first + const saveObj = { + name, + credentialName: componentCredential.name + } + + let plainDataObj = {} + for (const key in credentialData) { + if (credentialData[key] !== REDACTED_CREDENTIAL_VALUE) { + plainDataObj[key] = credentialData[key] + } + } + if (Object.keys(plainDataObj).length) saveObj.plainDataObj = plainDataObj + + const saveResp = await credentialsApi.updateCredential(credential.id, saveObj) + if (saveResp.data) { + credentialId = credential.id + } + } + + if (!credentialId) { + throw new Error('Failed to save credential') + } + + const authResponse = await oauth2Api.authorize(credentialId) + + if (authResponse.data && authResponse.data.success && authResponse.data.authorizationUrl) { + // Open the authorization URL in a new window/tab + const authWindow = window.open( + authResponse.data.authorizationUrl, + '_blank', + 'width=600,height=700,scrollbars=yes,resizable=yes' + ) + + if (!authWindow) { + throw new Error('Failed to open authorization window. 
Please check if popups are blocked.') + } + + // Listen for messages from the popup window + const handleMessage = (event) => { + // Verify origin if needed (you may want to add origin checking) + if (event.data && (event.data.type === 'OAUTH2_SUCCESS' || event.data.type === 'OAUTH2_ERROR')) { + window.removeEventListener('message', handleMessage) + + if (event.data.type === 'OAUTH2_SUCCESS') { + enqueueSnackbar({ + message: 'OAuth2 authorization completed successfully', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + onConfirm(credentialId) + } else if (event.data.type === 'OAUTH2_ERROR') { + enqueueSnackbar({ + message: event.data.message || 'OAuth2 authorization failed', + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + } + + // Close the auth window if it's still open + if (authWindow && !authWindow.closed) { + authWindow.close() + } + } + } + + // Add message listener + window.addEventListener('message', handleMessage) + + // Fallback: Monitor the auth window and handle if it closes manually + const checkClosed = setInterval(() => { + if (authWindow.closed) { + clearInterval(checkClosed) + window.removeEventListener('message', handleMessage) + + // If no message was received, assume user closed window manually + // Don't show error in this case, just close dialog + onConfirm(credentialId) + } + }, 1000) + + // Cleanup after a reasonable timeout (5 minutes) + setTimeout(() => { + clearInterval(checkClosed) + window.removeEventListener('message', handleMessage) + if (authWindow && !authWindow.closed) { + authWindow.close() + } + }, 300000) // 5 minutes + } else { + throw new Error('Invalid response from authorization endpoint') + } + } catch (error) { + console.error('OAuth2 authorization error:', error) + if (setError) setError(error) + enqueueSnackbar({ + message: `OAuth2 authorization failed: 
${error.response?.data?.message || error.message || 'Unknown error'}`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + } + } + const component = show ? ( - {componentCredential && componentCredential.label && ( + {!shared && componentCredential && componentCredential.label && (
    - {componentCredential && componentCredential.description && ( + {shared && ( +
    +
    + + Cannot edit shared credential. +
    +
    + )} + {!shared && componentCredential && componentCredential.description && (
    )} - {componentCredential && componentCredential.label && ( + {!shared && componentCredential && componentCredential.label && ( @@ -286,20 +459,45 @@ const AddEditCredentialDialog = ({ show, dialogProps, onCancel, onConfirm, setEr /> )} - {componentCredential && + {!shared && componentCredential && componentCredential.name && componentCredential.name.includes('OAuth2') && ( + + + OAuth Redirect URL + + + + )} + {!shared && + componentCredential && componentCredential.inputs && - componentCredential.inputs.map((inputParam, index) => ( - - ))} + componentCredential.inputs + .filter((inputParam) => inputParam.hidden !== true) + .map((inputParam, index) => )} + + {!shared && componentCredential && componentCredential.name && componentCredential.name.includes('OAuth2') && ( + + + + )} - (dialogProps.type === 'ADD' ? addNewCredential() : saveCredential())} - > - {dialogProps.confirmButtonName} - + {!shared && ( + (dialogProps.type === 'ADD' ? addNewCredential() : saveCredential())} + > + {dialogProps.confirmButtonName} + + )}
    diff --git a/packages/ui/src/views/credentials/CredentialListDialog.jsx b/packages/ui/src/views/credentials/CredentialListDialog.jsx index d5d1058d2..edf5d6e23 100644 --- a/packages/ui/src/views/credentials/CredentialListDialog.jsx +++ b/packages/ui/src/views/credentials/CredentialListDialog.jsx @@ -140,7 +140,11 @@ const CredentialListDialog = ({ show, dialogProps, onCancel, onCredentialSelecte width: 50, height: 50, borderRadius: '50%', - backgroundColor: 'white' + backgroundColor: 'white', + flexShrink: 0, + display: 'flex', + alignItems: 'center', + justifyContent: 'center' }} > ({ borderColor: theme.palette.grey[900] + 25, @@ -77,12 +78,12 @@ const Credentials = () => { const customization = useSelector((state) => state.customization) const dispatch = useDispatch() useNotifier() + const { error, setError } = useError() const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) const [isLoading, setLoading] = useState(true) - const [error, setError] = useState(null) const [showCredentialListDialog, setShowCredentialListDialog] = useState(false) const [credentialListDialogProps, setCredentialListDialogProps] = useState({}) const [showSpecificCredentialDialog, setShowSpecificCredentialDialog] = useState(false) @@ -90,6 +91,9 @@ const Credentials = () => { const [credentials, setCredentials] = useState([]) const [componentsCredentials, setComponentsCredentials] = useState([]) + const [showShareCredentialDialog, setShowShareCredentialDialog] = useState(false) + const [shareCredentialDialogProps, setShareCredentialDialogProps] = useState({}) + const { confirm } = useConfirm() const getAllCredentialsApi = useApi(credentialsApi.getAllCredentials) @@ -134,6 +138,22 @@ const Credentials = () => { setShowSpecificCredentialDialog(true) } + const share = (credential) => { + const dialogProps = { + type: 'EDIT', + cancelButtonName: 'Cancel', + confirmButtonName: 'Share', + data: 
{ + id: credential.id, + name: credential.name, + title: 'Share Credential', + itemType: 'credential' + } + } + setShareCredentialDialogProps(dialogProps) + setShowShareCredentialDialog(true) + } + const deleteCredential = async (credential) => { const confirmPayload = { title: `Delete`, @@ -177,7 +197,6 @@ const Credentials = () => { ) } }) - onCancel() } } } @@ -209,12 +228,6 @@ const Credentials = () => { } }, [getAllCredentialsApi.data]) - useEffect(() => { - if (getAllCredentialsApi.error) { - setError(getAllCredentialsApi.error) - } - }, [getAllCredentialsApi.error]) - useEffect(() => { if (getAllComponentsCredentialsApi.data) { setComponentsCredentials(getAllComponentsCredentialsApi.data) @@ -236,14 +249,15 @@ const Credentials = () => { title='Credentials' description='API keys, tokens, and secrets for 3rd party integrations' > - } > Add Credential - + {!isLoading && credentials.length <= 0 ? ( @@ -274,8 +288,9 @@ const Credentials = () => { Name Last Updated Created - - + + + @@ -297,6 +312,9 @@ const Credentials = () => { + + + @@ -314,6 +332,9 @@ const Credentials = () => { + + + ) : ( @@ -364,20 +385,46 @@ const Credentials = () => { {moment(credential.createdDate).format('MMMM Do, YYYY HH:mm:ss')} - - edit(credential)}> - - - - - deleteCredential(credential)} - > - - - + {!credential.shared && ( + <> + + share(credential)} + > + + + + + edit(credential)} + > + + + + + deleteCredential(credential)} + > + + + + + )} + {credential.shared && ( + <> + Shared Credential + + )} ))} @@ -395,13 +442,23 @@ const Credentials = () => { onCancel={() => setShowCredentialListDialog(false)} onCredentialSelected={onCredentialSelected} > - setShowSpecificCredentialDialog(false)} - onConfirm={onConfirm} - setError={setError} - > + {showSpecificCredentialDialog && ( + setShowSpecificCredentialDialog(false)} + onConfirm={onConfirm} + setError={setError} + > + )} + {showShareCredentialDialog && ( + setShowShareCredentialDialog(false)} + setError={setError} + > + )} ) diff 
--git a/packages/ui/src/views/datasets/AddEditDatasetDialog.jsx b/packages/ui/src/views/datasets/AddEditDatasetDialog.jsx new file mode 100644 index 000000000..babb70eb5 --- /dev/null +++ b/packages/ui/src/views/datasets/AddEditDatasetDialog.jsx @@ -0,0 +1,270 @@ +import { createPortal } from 'react-dom' +import PropTypes from 'prop-types' +import { useState, useEffect } from 'react' +import { useDispatch } from 'react-redux' +import { enqueueSnackbar as enqueueSnackbarAction, closeSnackbar as closeSnackbarAction } from '@/store/actions' + +// Material +import { Button, Dialog, DialogActions, DialogContent, DialogTitle, Box, Typography, OutlinedInput } from '@mui/material' + +// Project imports +import { StyledButton } from '@/ui-component/button/StyledButton' +import ConfirmDialog from '@/ui-component/dialog/ConfirmDialog' +import { File } from '@/ui-component/file/File' +import { SwitchInput } from '@/ui-component/switch/Switch' +import { TooltipWithParser } from '@/ui-component/tooltip/TooltipWithParser' + +// Icons +import { IconX, IconDatabase } from '@tabler/icons-react' + +// API +import datasetApi from '@/api/dataset' + +// Hooks + +// utils +import useNotifier from '@/utils/useNotifier' + +// const +import { HIDE_CANVAS_DIALOG, SHOW_CANVAS_DIALOG } from '@/store/actions' +const CSVFORMAT = `Only the first 2 columns will be considered: +---------------------------- +| Input | Output | +---------------------------- +| test input | test output | +---------------------------- +` + +const AddEditDatasetDialog = ({ show, dialogProps, onCancel, onConfirm }) => { + const portalElement = document.getElementById('portal') + + const dispatch = useDispatch() + + // ==============================|| Snackbar ||============================== // + + useNotifier() + + const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) + const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) + + const [datasetName, setDatasetName] = useState('') 
+ const [datasetDescription, setDatasetDescription] = useState('') + const [dialogType, setDialogType] = useState('ADD') + const [dataset, setDataset] = useState({}) + const [firstRowHeaders, setFirstRowHeaders] = useState(false) + const [selectedFile, setSelectedFile] = useState() + + useEffect(() => { + if (dialogProps.type === 'EDIT' && dialogProps.data) { + setDatasetName(dialogProps.data.name) + setDatasetDescription(dialogProps.data.description) + setDialogType('EDIT') + setDataset(dialogProps.data) + } else if (dialogProps.type === 'ADD') { + setDatasetName('') + setDatasetDescription('') + setDialogType('ADD') + setDataset({}) + } + + return () => { + setDatasetName('') + setDatasetDescription('') + setDialogType('ADD') + setDataset({}) + } + }, [dialogProps]) + + useEffect(() => { + if (show) dispatch({ type: SHOW_CANVAS_DIALOG }) + else dispatch({ type: HIDE_CANVAS_DIALOG }) + return () => dispatch({ type: HIDE_CANVAS_DIALOG }) + }, [show, dispatch]) + + const addNewDataset = async () => { + try { + const obj = { + name: datasetName, + description: datasetDescription + } + if (selectedFile) { + obj.firstRowHeaders = firstRowHeaders + obj.csvFile = selectedFile + } + const createResp = await datasetApi.createDataset(obj) + if (createResp.data) { + enqueueSnackbar({ + message: 'New Dataset added', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + onConfirm(createResp.data.id) + } + } catch (error) { + enqueueSnackbar({ + message: `Failed to add new Dataset: ${ + typeof error.response.data === 'object' ? 
error.response.data.message : error.response.data + }`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + onCancel() + } + } + + const saveDataset = async () => { + try { + const saveObj = { + name: datasetName, + description: datasetDescription + } + + const saveResp = await datasetApi.updateDataset(dataset.id, saveObj) + if (saveResp.data) { + enqueueSnackbar({ + message: 'Dataset saved', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + onConfirm(saveResp.data.id) + } + } catch (error) { + enqueueSnackbar({ + message: `Failed to save Dataset: ${ + typeof error.response.data === 'object' ? error.response.data.message : error.response.data + }`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + onCancel() + } + } + + const component = show ? ( + + +
    + + {dialogProps.type === 'ADD' ? 'Add Dataset' : 'Edit Dataset'} +
    +
    + + +
    + + Name * + +
    +
    + setDatasetName(e.target.value)} + value={datasetName ?? ''} + /> +
    + +
    + Description +
    +
    + setDatasetDescription(e.target.value)} + value={datasetDescription ?? ''} + /> +
    + {dialogType === 'ADD' && ( + +
    + + Upload CSV + ${CSVFORMAT}`} /> + +
    +
    + setSelectedFile(newValue)} + value={selectedFile ?? 'Choose a file to upload'} + /> + +
    + )} +
    + + + (dialogType === 'ADD' ? addNewDataset() : saveDataset())} + > + {dialogProps.confirmButtonName} + + + +
    + ) : null + + return createPortal(component, portalElement) +} + +AddEditDatasetDialog.propTypes = { + show: PropTypes.bool, + dialogProps: PropTypes.object, + onCancel: PropTypes.func, + onConfirm: PropTypes.func +} + +export default AddEditDatasetDialog diff --git a/packages/ui/src/views/datasets/AddEditDatasetRowDialog.jsx b/packages/ui/src/views/datasets/AddEditDatasetRowDialog.jsx new file mode 100644 index 000000000..920a33a6c --- /dev/null +++ b/packages/ui/src/views/datasets/AddEditDatasetRowDialog.jsx @@ -0,0 +1,244 @@ +import { createPortal } from 'react-dom' +import PropTypes from 'prop-types' +import { useState, useEffect } from 'react' +import { useDispatch } from 'react-redux' +import { enqueueSnackbar as enqueueSnackbarAction, closeSnackbar as closeSnackbarAction } from '@/store/actions' + +// Material +import { Button, Dialog, DialogActions, DialogContent, DialogTitle, Box, Typography, OutlinedInput } from '@mui/material' + +// Project imports +import { StyledPermissionButton } from '@/ui-component/button/RBACButtons' +import ConfirmDialog from '@/ui-component/dialog/ConfirmDialog' + +// Icons +import { IconX, IconDatabase } from '@tabler/icons-react' + +// API +import datasetApi from '@/api/dataset' + +// Hooks + +// utils +import useNotifier from '@/utils/useNotifier' + +// const +import { HIDE_CANVAS_DIALOG, SHOW_CANVAS_DIALOG } from '@/store/actions' + +const AddEditDatasetRowDialog = ({ show, dialogProps, onCancel, onConfirm }) => { + const portalElement = document.getElementById('portal') + + const dispatch = useDispatch() + + // ==============================|| Snackbar ||============================== // + + useNotifier() + + const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) + const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) + + const [datasetId, setDatasetId] = useState('') + const [datasetName, setDatasetName] = useState('') + const [input, setInput] = useState('') + const [output, 
setOutput] = useState('') + const [dialogType, setDialogType] = useState('ADD') + const [row, setRow] = useState({}) + + useEffect(() => { + if (dialogProps.type === 'EDIT' && dialogProps.data) { + setDatasetId(dialogProps.data.datasetId) + setDatasetName(dialogProps.data.datasetName) + setDialogType('EDIT') + setRow(dialogProps.data) + setInput(dialogProps.data.input) + setOutput(dialogProps.data.output) + } else if (dialogProps.type === 'ADD') { + setDatasetId(dialogProps.data.datasetId) + setDatasetName(dialogProps.data.datasetName) + setDialogType('ADD') + setRow({}) + } + + return () => { + setInput('') + setOutput('') + setDialogType('ADD') + setRow({}) + } + }, [dialogProps]) + + useEffect(() => { + if (show) dispatch({ type: SHOW_CANVAS_DIALOG }) + else dispatch({ type: HIDE_CANVAS_DIALOG }) + return () => dispatch({ type: HIDE_CANVAS_DIALOG }) + }, [show, dispatch]) + + const addNewDatasetRow = async () => { + try { + const obj = { + datasetId: datasetId, + input: input, + output: output + } + const createResp = await datasetApi.createDatasetRow(obj) + if (createResp.data) { + enqueueSnackbar({ + message: 'New Row added for the given Dataset', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + onConfirm(createResp.data.id) + } + } catch (error) { + enqueueSnackbar({ + message: `Failed to add new row in the Dataset: ${ + typeof error.response.data === 'object' ? 
error.response.data.message : error.response.data + }`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + onCancel() + } + } + + const saveDatasetRow = async () => { + try { + const saveObj = { + input: input, + output: output + } + + const saveResp = await datasetApi.updateDatasetRow(row.id, saveObj) + if (saveResp.data) { + enqueueSnackbar({ + message: 'Dataset Row saved', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + onConfirm(saveResp.data.id) + } + } catch (error) { + enqueueSnackbar({ + message: `Failed to save Dataset Row: ${ + typeof error.response.data === 'object' ? error.response.data.message : error.response.data + }`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + onCancel() + } + } + + const component = show ? ( + + +
    + + {dialogProps.type === 'ADD' ? `Add Item to ${datasetName} Dataset` : `Edit Item in ${datasetName} Dataset`} +
    +
    + + +
    + + Input * + + +
    +
    + setInput(e.target.value)} + value={input ?? ''} + /> +
    + +
    + + Anticipated Output * + + +
    +
    + setOutput(e.target.value)} + value={output ?? ''} + /> +
    +
    + + + (dialogType === 'ADD' ? addNewDatasetRow() : saveDatasetRow())} + > + {dialogProps.confirmButtonName} + + + +
    + ) : null + + return createPortal(component, portalElement) +} + +AddEditDatasetRowDialog.propTypes = { + show: PropTypes.bool, + dialogProps: PropTypes.object, + onCancel: PropTypes.func, + onConfirm: PropTypes.func +} + +export default AddEditDatasetRowDialog diff --git a/packages/ui/src/views/datasets/DatasetItems.jsx b/packages/ui/src/views/datasets/DatasetItems.jsx new file mode 100644 index 000000000..868f17fe3 --- /dev/null +++ b/packages/ui/src/views/datasets/DatasetItems.jsx @@ -0,0 +1,509 @@ +import { useEffect, useRef, useState } from 'react' +import React from 'react' +import { useDispatch, useSelector } from 'react-redux' + +// material-ui +import { + Checkbox, + Skeleton, + Box, + TableRow, + TableContainer, + Paper, + Table, + TableHead, + TableBody, + Button, + Stack, + Typography +} from '@mui/material' +import { useTheme } from '@mui/material/styles' + +// project imports +import MainCard from '@/ui-component/cards/MainCard' +import { StyledTableCell, StyledTableRow } from '@/ui-component/table/TableStyles' +import ConfirmDialog from '@/ui-component/dialog/ConfirmDialog' +import AddEditDatasetRowDialog from './AddEditDatasetRowDialog' +import UploadCSVFileDialog from '@/views/datasets/UploadCSVFileDialog' +import ErrorBoundary from '@/ErrorBoundary' +import { useError } from '@/store/context/ErrorContext' +import ViewHeader from '@/layout/MainLayout/ViewHeader' +import { PermissionButton, StyledPermissionButton } from '@/ui-component/button/RBACButtons' +import AddEditDatasetDialog from '@/views/datasets/AddEditDatasetDialog' +import TablePagination, { DEFAULT_ITEMS_PER_PAGE } from '@/ui-component/pagination/TablePagination' + +// API +import datasetsApi from '@/api/dataset' + +// Hooks +import useApi from '@/hooks/useApi' +import { closeSnackbar as closeSnackbarAction, enqueueSnackbar as enqueueSnackbarAction } from '@/store/actions' +import useNotifier from '@/utils/useNotifier' +import useConfirm from '@/hooks/useConfirm' +import { useAuth 
} from '@/hooks/useAuth' + +// icons +import empty_datasetSVG from '@/assets/images/empty_datasets.svg' +import { IconTrash, IconPlus, IconX, IconUpload, IconArrowsDownUp } from '@tabler/icons-react' +import DragIndicatorIcon from '@mui/icons-material/DragIndicator' + +// ==============================|| Dataset Items ||============================== // + +const EvalDatasetRows = () => { + const theme = useTheme() + const customization = useSelector((state) => state.customization) + const dispatch = useDispatch() + useNotifier() + const { error } = useError() + + const [showRowDialog, setShowRowDialog] = useState(false) + const [showUploadDialog, setShowUploadDialog] = useState(false) + const [rowDialogProps, setRowDialogProps] = useState({}) + const [showDatasetDialog, setShowDatasetDialog] = useState(false) + const [datasetDialogProps, setDatasetDialogProps] = useState({}) + + const [dataset, setDataset] = useState([]) + const [isLoading, setLoading] = useState(true) + const [selected, setSelected] = useState([]) + + const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) + const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) + + const { confirm } = useConfirm() + + const getDatasetRows = useApi(datasetsApi.getDataset) + const reorderDatasetRowApi = useApi(datasetsApi.reorderDatasetRow) + + const URLpath = document.location.pathname.toString().split('/') + const datasetId = URLpath[URLpath.length - 1] === 'dataset_rows' ? 
'' : URLpath[URLpath.length - 1] + + const { hasPermission } = useAuth() + + const draggingItem = useRef() + const dragOverItem = useRef() + const [Draggable, setDraggable] = useState(false) + const [startDragPos, setStartDragPos] = useState(-1) + const [endDragPos, setEndDragPos] = useState(-1) + + /* Table Pagination */ + const [currentPage, setCurrentPage] = useState(1) + const [pageLimit, setPageLimit] = useState(DEFAULT_ITEMS_PER_PAGE) + const [total, setTotal] = useState(0) + const onChange = (page, pageLimit) => { + setCurrentPage(page) + setPageLimit(pageLimit) + refresh(page, pageLimit) + } + + const refresh = (page, limit) => { + setLoading(true) + const params = { + page: page || currentPage, + limit: limit || pageLimit + } + getDatasetRows.request(datasetId, params) + } + + const handleDragStart = (e, position) => { + draggingItem.current = position + setStartDragPos(position) + setEndDragPos(-1) + } + const handleDragEnter = (e, position) => { + setEndDragPos(position) + dragOverItem.current = position + } + + const handleDragEnd = (e, position) => { + dragOverItem.current = position + const updatedDataset = { ...dataset } + updatedDataset.rows.splice(endDragPos, 0, dataset.rows.splice(startDragPos, 1)[0]) + setDataset({ ...updatedDataset }) + e.preventDefault() + const updatedRows = [] + + dataset.rows.map((item, index) => { + updatedRows.push({ + id: item.id, + sequenceNo: index + }) + }) + reorderDatasetRowApi.request({ datasetId: datasetId, rows: updatedRows }) + } + + const onSelectAllClick = (event) => { + if (event.target.checked) { + const newSelected = (dataset?.rows || []).map((n) => n.id) + setSelected(newSelected) + return + } + setSelected([]) + } + + const handleSelect = (event, id) => { + const selectedIndex = selected.indexOf(id) + let newSelected = [] + + if (selectedIndex === -1) { + newSelected = newSelected.concat(selected, id) + } else if (selectedIndex === 0) { + newSelected = newSelected.concat(selected.slice(1)) + } else if 
(selectedIndex === selected.length - 1) { + newSelected = newSelected.concat(selected.slice(0, -1)) + } else if (selectedIndex > 0) { + newSelected = newSelected.concat(selected.slice(0, selectedIndex), selected.slice(selectedIndex + 1)) + } + setSelected(newSelected) + } + + const addNew = () => { + const dialogProp = { + type: 'ADD', + cancelButtonName: 'Cancel', + confirmButtonName: 'Add', + data: { + datasetId: datasetId, + datasetName: dataset.name + } + } + setRowDialogProps(dialogProp) + setShowRowDialog(true) + } + + const uploadCSV = () => { + const dialogProp = { + type: 'ADD', + cancelButtonName: 'Cancel', + confirmButtonName: 'Upload', + data: { + datasetId: datasetId, + datasetName: dataset.name + } + } + setRowDialogProps(dialogProp) + setShowUploadDialog(true) + } + + const editDs = () => { + const dialogProp = { + type: 'EDIT', + cancelButtonName: 'Cancel', + confirmButtonName: 'Save', + data: dataset + } + setDatasetDialogProps(dialogProp) + setShowDatasetDialog(true) + } + + const edit = (item) => { + const dialogProp = { + type: 'EDIT', + cancelButtonName: 'Cancel', + confirmButtonName: 'Save', + data: { + datasetName: dataset.name, + ...item + } + } + setRowDialogProps(dialogProp) + setShowRowDialog(true) + } + + const deleteDatasetItems = async () => { + const confirmPayload = { + title: `Delete`, + description: `Delete ${selected.length} dataset items?`, + confirmButtonName: 'Delete', + cancelButtonName: 'Cancel' + } + const isConfirmed = await confirm(confirmPayload) + + if (isConfirmed) { + try { + const deleteResp = await datasetsApi.deleteDatasetItems(selected) + if (deleteResp.data) { + enqueueSnackbar({ + message: 'Dataset Items deleted', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + onConfirm() + } + } catch (error) { + enqueueSnackbar({ + message: `Failed to delete dataset items: ${ + typeof error.response.data === 'object' ? 
error.response.data.message : error.response.data + }`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + } + setSelected([]) + } + } + + const onConfirm = () => { + setShowRowDialog(false) + setShowUploadDialog(false) + setShowDatasetDialog(false) + refresh(currentPage, pageLimit) + } + + useEffect(() => { + refresh(currentPage, pageLimit) + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []) + + useEffect(() => { + if (getDatasetRows.data) { + const dataset = getDatasetRows.data + setDataset(dataset) + setTotal(dataset.total) + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getDatasetRows.data]) + + useEffect(() => { + setLoading(getDatasetRows.loading) + }, [getDatasetRows.loading]) + + return ( + <> + + {error ? ( + + ) : ( + + window.history.back()} + search={false} + title={`Dataset : ${dataset?.name || ''}`} + description={dataset?.description} + > + } + > + Upload CSV + + } + > + New Item + + + {selected.length > 0 && ( + } + > + Delete {selected.length} {selected.length === 1 ? 'item' : 'items'} + + )} + {!isLoading && dataset?.rows?.length <= 0 ? ( + + + empty_datasetSVG + +
    No Dataset Items Yet
    + } + onClick={addNew} + > + New Item + +
    + ) : ( + + + + + + + + + Input + Expected Output + + + + + + + {isLoading ? ( + <> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ) : ( + <> + {(dataset?.rows || []).map((item, index) => ( + handleDragStart(e, index)} + onDragOver={(e) => e.preventDefault()} + onDragEnter={(e) => handleDragEnter(e, index)} + onDragEnd={(e) => handleDragEnd(e, index)} + hover + key={index} + sx={{ cursor: 'pointer', '&:last-child td, &:last-child th': { border: 0 } }} + > + setDraggable(false)} + onMouseUp={() => setDraggable(true)} + > + handleSelect(event, item.id)} + inputProps={{ + 'aria-labelledby': item.id + }} + /> + + edit(item)} + onMouseDown={() => setDraggable(false)} + onMouseUp={() => setDraggable(true)} + > + {item.input} + + edit(item)} + onMouseDown={() => setDraggable(false)} + onMouseUp={() => setDraggable(true)} + > + {item.output} + + + setDraggable(true)} + onMouseUp={() => setDraggable(false)} + /> + + + ))} + + )} + +
    +
    + + Use the drag icon at (extreme right) to reorder the dataset items + + {/* Pagination and Page Size Controls */} + +
    + )} +
    + )} +
    + setShowRowDialog(false)} + onConfirm={onConfirm} + > + {showUploadDialog && ( + setShowUploadDialog(false)} + onConfirm={onConfirm} + > + )} + {showDatasetDialog && ( + setShowDatasetDialog(false)} + onConfirm={onConfirm} + > + )} + + + ) +} + +export default EvalDatasetRows diff --git a/packages/ui/src/views/datasets/UploadCSVFileDialog.jsx b/packages/ui/src/views/datasets/UploadCSVFileDialog.jsx new file mode 100644 index 000000000..355a01767 --- /dev/null +++ b/packages/ui/src/views/datasets/UploadCSVFileDialog.jsx @@ -0,0 +1,196 @@ +import { createPortal } from 'react-dom' +import PropTypes from 'prop-types' +import { useState, useEffect } from 'react' +import { useDispatch } from 'react-redux' +import { enqueueSnackbar as enqueueSnackbarAction, closeSnackbar as closeSnackbarAction } from '@/store/actions' + +// Material +import { Button, Dialog, DialogActions, DialogContent, DialogTitle, Box, Typography } from '@mui/material' + +// Project imports +import { StyledButton } from '@/ui-component/button/StyledButton' +import ConfirmDialog from '@/ui-component/dialog/ConfirmDialog' +import { SwitchInput } from '@/ui-component/switch/Switch' +import { File } from '@/ui-component/file/File' +import { TooltipWithParser } from '@/ui-component/tooltip/TooltipWithParser' + +// Icons +import { IconX, IconDatabase } from '@tabler/icons-react' + +// API +import datasetApi from '@/api/dataset' + +// utils +import useNotifier from '@/utils/useNotifier' + +// const +import { HIDE_CANVAS_DIALOG, SHOW_CANVAS_DIALOG } from '@/store/actions' +const CSVFORMAT = `Only the first 2 columns will be considered: +---------------------------- +| Input | Output | +---------------------------- +| test input | test output | +---------------------------- +` + +const UploadCSVFileDialog = ({ show, dialogProps, onCancel, onConfirm }) => { + const portalElement = document.getElementById('portal') + + const dispatch = useDispatch() + + // ==============================|| Snackbar 
||============================== // + + useNotifier() + + const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) + const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) + + const [datasetId, setDatasetId] = useState('') + const [datasetName, setDatasetName] = useState('') + const [firstRowHeaders, setFirstRowHeaders] = useState(false) + const [selectedFile, setSelectedFile] = useState() + const [dialogType, setDialogType] = useState('ADD') + + useEffect(() => { + setDatasetId(dialogProps.data.datasetId) + setDatasetName(dialogProps.data.datasetName) + setDialogType('ADD') + + return () => { + setDialogType('ADD') + setDatasetId('') + setDatasetName('') + setFirstRowHeaders(false) + setSelectedFile() + } + }, [dialogProps]) + + useEffect(() => { + if (show) dispatch({ type: SHOW_CANVAS_DIALOG }) + else dispatch({ type: HIDE_CANVAS_DIALOG }) + return () => dispatch({ type: HIDE_CANVAS_DIALOG }) + }, [show, dispatch]) + + const addNewDatasetRow = async () => { + try { + const obj = { + datasetId: datasetId, + firstRowHeaders: firstRowHeaders, + csvFile: selectedFile + } + const createResp = await datasetApi.createDatasetRow(obj) + if (createResp.data) { + enqueueSnackbar({ + message: 'New Row added for the given Dataset', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + onConfirm(createResp.data.id) + } + } catch (error) { + enqueueSnackbar({ + message: `Failed to add new row in the Dataset: ${ + typeof error.response.data === 'object' ? error.response.data.message : error.response.data + }`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + onCancel() + } + } + + const component = show ? ( + + +
    +
    + +
    + {'Upload Items to [' + datasetName + '] Dataset'} +
    +
    + + +
    + + Upload CSV + ${CSVFORMAT}`} /> + +
    +
    + setSelectedFile(newValue)} + value={selectedFile ?? 'Choose a file to upload'} + /> + +
    +
    + + + (dialogType === 'ADD' ? addNewDatasetRow() : saveDatasetRow())} + > + {dialogProps.confirmButtonName} + + + +
    + ) : null + + return createPortal(component, portalElement) +} + +UploadCSVFileDialog.propTypes = { + show: PropTypes.bool, + dialogProps: PropTypes.object, + onCancel: PropTypes.func, + onConfirm: PropTypes.func +} + +export default UploadCSVFileDialog diff --git a/packages/ui/src/views/datasets/index.jsx b/packages/ui/src/views/datasets/index.jsx new file mode 100644 index 000000000..95fdfb1e3 --- /dev/null +++ b/packages/ui/src/views/datasets/index.jsx @@ -0,0 +1,368 @@ +import { useEffect, useState } from 'react' +import { useDispatch, useSelector } from 'react-redux' +import moment from 'moment/moment' +import { useNavigate } from 'react-router-dom' + +// material-ui +import { + Skeleton, + Box, + Stack, + TableContainer, + Paper, + Table, + TableHead, + TableRow, + TableCell, + TableBody, + IconButton, + Button +} from '@mui/material' +import { useTheme } from '@mui/material/styles' + +// project imports +import MainCard from '@/ui-component/cards/MainCard' +import ConfirmDialog from '@/ui-component/dialog/ConfirmDialog' +import AddEditDatasetDialog from './AddEditDatasetDialog' +import ViewHeader from '@/layout/MainLayout/ViewHeader' +import ErrorBoundary from '@/ErrorBoundary' +import { StyledTableCell, StyledTableRow } from '@/ui-component/table/TableStyles' +import { StyledPermissionButton } from '@/ui-component/button/RBACButtons' +import { Available } from '@/ui-component/rbac/available' +import TablePagination, { DEFAULT_ITEMS_PER_PAGE } from '@/ui-component/pagination/TablePagination' + +// API +import { closeSnackbar as closeSnackbarAction, enqueueSnackbar as enqueueSnackbarAction } from '@/store/actions' +import useConfirm from '@/hooks/useConfirm' +import datasetsApi from '@/api/dataset' + +// Hooks +import useApi from '@/hooks/useApi' +import useNotifier from '@/utils/useNotifier' + +// icons +import empty_datasetSVG from '@/assets/images/empty_datasets.svg' +import { IconTrash, IconEdit, IconPlus, IconX } from '@tabler/icons-react' + +// 
Utils +import { truncateString } from '@/utils/genericHelper' + +import { useError } from '@/store/context/ErrorContext' + +// ==============================|| Datasets ||============================== // + +const EvalDatasets = () => { + const navigate = useNavigate() + const theme = useTheme() + const { confirm } = useConfirm() + const { error } = useError() + + const customization = useSelector((state) => state.customization) + + useNotifier() + const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) + const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) + + const [search, setSearch] = useState('') + const dispatch = useDispatch() + const [isLoading, setLoading] = useState(true) + const [datasets, setDatasets] = useState([]) + const [showDatasetDialog, setShowDatasetDialog] = useState(false) + const [datasetDialogProps, setDatasetDialogProps] = useState({}) + const getAllDatasets = useApi(datasetsApi.getAllDatasets) + + /* Table Pagination */ + const [currentPage, setCurrentPage] = useState(1) + const [pageLimit, setPageLimit] = useState(DEFAULT_ITEMS_PER_PAGE) + const [total, setTotal] = useState(0) + const onChange = (page, pageLimit) => { + setCurrentPage(page) + setPageLimit(pageLimit) + refresh(page, pageLimit) + } + + const refresh = (page, limit) => { + setLoading(true) + const params = { + page: page || currentPage, + limit: limit || pageLimit + } + getAllDatasets.request(params) + } + + const goToRows = (selectedDataset) => { + navigate(`/dataset_rows/${selectedDataset.id}?page=1&limit=10`) + } + + const onSearchChange = (event) => { + setSearch(event.target.value) + } + + const addNew = () => { + const dialogProp = { + type: 'ADD', + cancelButtonName: 'Cancel', + confirmButtonName: 'Add', + data: {} + } + setDatasetDialogProps(dialogProp) + setShowDatasetDialog(true) + } + + const edit = (dataset) => { + const dialogProp = { + type: 'EDIT', + cancelButtonName: 'Cancel', + confirmButtonName: 'Save', + data: 
dataset + } + setDatasetDialogProps(dialogProp) + setShowDatasetDialog(true) + } + + const deleteDataset = async (dataset) => { + const confirmPayload = { + title: `Delete`, + description: `Delete dataset ${dataset.name}?`, + confirmButtonName: 'Delete', + cancelButtonName: 'Cancel' + } + const isConfirmed = await confirm(confirmPayload) + + if (isConfirmed) { + try { + const deleteResp = await datasetsApi.deleteDataset(dataset.id) + if (deleteResp.data) { + enqueueSnackbar({ + message: 'Dataset deleted', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + onConfirm() + } + } catch (error) { + enqueueSnackbar({ + message: `Failed to delete dataset: ${ + typeof error.response.data === 'object' ? error.response.data.message : error.response.data + }`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + } + } + } + + const onConfirm = () => { + setShowDatasetDialog(false) + refresh() + } + + function filterDatasets(data) { + return data.name.toLowerCase().indexOf(search.toLowerCase()) > -1 + } + + useEffect(() => { + refresh(currentPage, pageLimit) + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []) + + useEffect(() => { + if (getAllDatasets.data) { + setDatasets(getAllDatasets.data?.data) + setTotal(getAllDatasets.data?.total) + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getAllDatasets.data]) + + useEffect(() => { + setLoading(getAllDatasets.loading) + }, [getAllDatasets.loading]) + + return ( + <> + + {error ? ( + + ) : ( + + + } + > + Add New + + + {!isLoading && datasets.length <= 0 ? ( + + + empty_datasetSVG + +
    No Datasets Yet
    +
    + ) : ( + <> + + + + + Name + Description + Rows + Last Updated + + + + + + + + + + {isLoading ? ( + <> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ) : ( + <> + {datasets.filter(filterDatasets).map((ds, index) => ( + + goToRows(ds)} component='th' scope='row'> + {ds.name} + + goToRows(ds)} + style={{ wordWrap: 'break-word', flexWrap: 'wrap', width: '40%' }} + > + {truncateString(ds?.description, 200)} + + goToRows(ds)}>{ds?.rowCount} + goToRows(ds)}> + {moment(ds.updatedDate).format('MMMM Do YYYY, hh:mm A')} + + + + edit(ds)}> + + + + + + + deleteDataset(ds)} + > + + + + + + ))} + + )} + +
    +
    + {/* Pagination and Page Size Controls */} + + + )} +
    + )} +
    + setShowDatasetDialog(false)} + onConfirm={onConfirm} + > + + + ) +} + +export default EvalDatasets diff --git a/packages/ui/src/views/docstore/AddDocStoreDialog.jsx b/packages/ui/src/views/docstore/AddDocStoreDialog.jsx index f77eb9588..b6087184a 100644 --- a/packages/ui/src/views/docstore/AddDocStoreDialog.jsx +++ b/packages/ui/src/views/docstore/AddDocStoreDialog.jsx @@ -87,10 +87,11 @@ const AddDocStoreDialog = ({ show, dialogProps, onCancel, onConfirm }) => { }) onConfirm(createResp.data.id) } - } catch (err) { - const errorData = typeof err === 'string' ? err : err.response?.data || `${err.response.data.message}` + } catch (error) { enqueueSnackbar({ - message: `Failed to add new Document Store: ${errorData}`, + message: `Failed to add new Document Store: ${ + typeof error.response.data === 'object' ? error.response.data.message : error.response.data + }`, options: { key: new Date().getTime() + Math.random(), variant: 'error', @@ -130,9 +131,10 @@ const AddDocStoreDialog = ({ show, dialogProps, onCancel, onConfirm }) => { onConfirm(saveResp.data.id) } } catch (error) { - const errorData = error.response?.data || `${error.response?.status}: ${error.response?.statusText}` enqueueSnackbar({ - message: `Failed to update Document Store: ${errorData}`, + message: `Failed to update Document Store: ${ + typeof error.response.data === 'object' ? 
error.response.data.message : error.response.data + }`, options: { key: new Date().getTime() + Math.random(), variant: 'error', diff --git a/packages/ui/src/views/docstore/ComponentsListDialog.jsx b/packages/ui/src/views/docstore/ComponentsListDialog.jsx index 9e4658f3c..ec398d7bc 100644 --- a/packages/ui/src/views/docstore/ComponentsListDialog.jsx +++ b/packages/ui/src/views/docstore/ComponentsListDialog.jsx @@ -153,7 +153,11 @@ const ComponentsListDialog = ({ show, dialogProps, onCancel, apiCall, onSelected width: 50, height: 50, borderRadius: '50%', - backgroundColor: 'white' + backgroundColor: 'white', + flexShrink: 0, + display: 'flex', + alignItems: 'center', + justifyContent: 'center' }} > { const [nodeConfig, setNodeConfig] = useState({}) const [values, setValues] = useState('') const theme = useTheme() + const customization = useSelector((state) => state.customization) const [nodeConfigExpanded, setNodeConfigExpanded] = useState({}) const getConfigApi = useApi(documentstoreApi.getDocumentStoreConfig) @@ -24,7 +39,7 @@ const DocStoreAPIDialog = ({ show, dialogProps, onCancel }) => { import requests import json -API_URL = "http://localhost:3000/api/v1/document-store/upsert/${dialogProps.storeId}" +API_URL = "${baseURL}/api/v1/document-store/upsert/${dialogProps.storeId}" API_KEY = "your_api_key_here" # use form data to upload files @@ -37,6 +52,7 @@ body_data = { "metadata": {}, # Add additional metadata to the document chunks "replaceExisting": True, # Replace existing document with the new upserted chunks "createNewDocStore": False, # Create a new document store + "loaderName": "Custom Loader Name", # Override the loader name "splitter": json.dumps({"config":{"chunkSize":20000}}) # Override existing configuration # "loader": "", # "vectorStore": "", @@ -63,6 +79,7 @@ print(output) let formData = new FormData(); formData.append("files", input.files[0]); formData.append("docId", "${dialogProps.loaderId}"); +formData.append("loaderName", "Custom Loader 
Name"); formData.append("splitter", JSON.stringify({"config":{"chunkSize":20000}})); // Add additional metadata to the document chunks formData.append("metadata", "{}"); @@ -79,7 +96,7 @@ formData.append("createNewDocStore", "false"); async function query(formData) { const response = await fetch( - "http://localhost:3000/api/v1/document-store/upsert/${dialogProps.storeId}", + "${baseURL}/api/v1/document-store/upsert/${dialogProps.storeId}", { method: "POST", headers: { @@ -98,10 +115,11 @@ query(formData).then((response) => { \`\`\` \`\`\`bash -curl -X POST http://localhost:3000/api/v1/document-store/upsert/${dialogProps.storeId} \\ +curl -X POST ${baseURL}/api/v1/document-store/upsert/${dialogProps.storeId} \\ -H "Authorization: Bearer " \\ -F "files=@" \\ -F "docId=${dialogProps.loaderId}" \\ + -F "loaderName=Custom Loader Name" \\ -F "splitter={"config":{"chunkSize":20000}}" \\ -F "metadata={}" \\ -F "replaceExisting=true" \\ @@ -122,7 +140,7 @@ curl -X POST http://localhost:3000/api/v1/document-store/upsert/${dialogProps.st \`\`\`python import requests -API_URL = "http://localhost:3000/api/v1/document-store/upsert/${dialogProps.storeId}" +API_URL = "${baseURL}/api/v1/document-store/upsert/${dialogProps.storeId}" API_KEY = "your_api_key_here" headers = { @@ -138,6 +156,7 @@ output = query({ "metadata": "{}", # Add additional metadata to the document chunks "replaceExisting": True, # Replace existing document with the new upserted chunks "createNewDocStore": False, # Create a new document store + "loaderName": "Custom Loader Name", # Override the loader name # Override existing configuration "loader": { "config": { @@ -160,7 +179,7 @@ print(output) \`\`\`javascript async function query(data) { const response = await fetch( - "http://localhost:3000/api/v1/document-store/upsert/${dialogProps.storeId}", + "${baseURL}/api/v1/document-store/upsert/${dialogProps.storeId}", { method: "POST", headers: { @@ -175,10 +194,11 @@ async function query(data) { } query({ - 
"docId": "${dialogProps.loaderId}, + "docId": "${dialogProps.loaderId}", "metadata": "{}", // Add additional metadata to the document chunks "replaceExisting": true, // Replace existing document with the new upserted chunks "createNewDocStore": false, // Create a new document store + "loaderName": "Custom Loader Name", // Override the loader name // Override existing configuration "loader": { "config": { @@ -200,7 +220,7 @@ query({ \`\`\` \`\`\`bash -curl -X POST http://localhost:3000/api/v1/document-store/upsert/${dialogProps.storeId} \\ +curl -X POST ${baseURL}/api/v1/document-store/upsert/${dialogProps.storeId} \\ -H "Content-Type: application/json" \\ -H "Authorization: Bearer " \\ -d '{ @@ -208,6 +228,7 @@ curl -X POST http://localhost:3000/api/v1/document-store/upsert/${dialogProps.st "metadata": "{}", "replaceExisting": true, "createNewDocStore": false, + "loaderName": "Custom Loader Name", "loader": { "config": { "text": "This is a new text" @@ -303,6 +324,37 @@ curl -X POST http://localhost:3000/api/v1/document-store/upsert/${dialogProps.st {dialogProps.title} + {/* Info Box */} + + + + Note: Upsert API can only be used when the existing document loader has been upserted before. 
+ + + + {/** info */} + {values} You can override existing configurations: diff --git a/packages/ui/src/views/docstore/DocStoreInputHandler.jsx b/packages/ui/src/views/docstore/DocStoreInputHandler.jsx index 7c6b3e521..dc67f4469 100644 --- a/packages/ui/src/views/docstore/DocStoreInputHandler.jsx +++ b/packages/ui/src/views/docstore/DocStoreInputHandler.jsx @@ -1,10 +1,10 @@ import PropTypes from 'prop-types' -import { useState } from 'react' +import { useState, useContext } from 'react' import { useSelector } from 'react-redux' // material-ui import { Box, Typography, IconButton, Button } from '@mui/material' -import { IconRefresh, IconArrowsMaximize, IconAlertTriangle } from '@tabler/icons-react' +import { IconArrowsMaximize, IconAlertTriangle, IconRefresh } from '@tabler/icons-react' // project import import { Dropdown } from '@/ui-component/dropdown/Dropdown' @@ -17,17 +17,21 @@ import { SwitchInput } from '@/ui-component/switch/Switch' import { JsonEditorInput } from '@/ui-component/json/JsonEditor' import { TooltipWithParser } from '@/ui-component/tooltip/TooltipWithParser' import { CodeEditor } from '@/ui-component/editor/CodeEditor' +import { ArrayRenderer } from '@/ui-component/array/ArrayRenderer' import ExpandTextDialog from '@/ui-component/dialog/ExpandTextDialog' import ManageScrapedLinksDialog from '@/ui-component/dialog/ManageScrapedLinksDialog' import CredentialInputHandler from '@/views/canvas/CredentialInputHandler' +import { flowContext } from '@/store/context/ReactFlowContext' // const import { FLOWISE_CREDENTIAL_ID } from '@/store/constant' // ===========================|| DocStoreInputHandler ||=========================== // -const DocStoreInputHandler = ({ inputParam, data, disabled = false }) => { +const DocStoreInputHandler = ({ inputParam, data, disabled = false, onNodeDataChange }) => { const customization = useSelector((state) => state.customization) + const flowContextValue = useContext(flowContext) + const nodeDataChangeHandler = 
onNodeDataChange || flowContextValue?.onNodeDataChange const [showExpandDialog, setShowExpandDialog] = useState(false) const [expandDialogProps, setExpandDialogProps] = useState({}) @@ -35,6 +39,14 @@ const DocStoreInputHandler = ({ inputParam, data, disabled = false }) => { const [manageScrapedLinksDialogProps, setManageScrapedLinksDialogProps] = useState({}) const [reloadTimestamp, setReloadTimestamp] = useState(Date.now().toString()) + const handleDataChange = ({ inputParam, newValue }) => { + data.inputs[inputParam.name] = newValue + const allowedShowHideInputTypes = ['boolean', 'asyncOptions', 'asyncMultiOptions', 'options', 'multiOptions'] + if (allowedShowHideInputTypes.includes(inputParam.type) && nodeDataChangeHandler) { + nodeDataChangeHandler({ nodeId: data.id, inputParam, newValue }) + } + } + const onExpandDialogClicked = (value, inputParam) => { const dialogProps = { value, @@ -149,7 +161,7 @@ const DocStoreInputHandler = ({ inputParam, data, disabled = false }) => { {inputParam.type === 'boolean' && ( (data.inputs[inputParam.name] = newValue)} + onChange={(newValue) => handleDataChange({ inputParam, newValue })} value={data.inputs[inputParam.name] ?? inputParam.default ?? false} /> )} @@ -203,7 +215,7 @@ const DocStoreInputHandler = ({ inputParam, data, disabled = false }) => { disabled={disabled} name={inputParam.name} options={inputParam.options} - onSelect={(newValue) => (data.inputs[inputParam.name] = newValue)} + onSelect={(newValue) => handleDataChange({ inputParam, newValue })} value={data.inputs[inputParam.name] ?? inputParam.default ?? 'choose an option'} /> )} @@ -213,7 +225,7 @@ const DocStoreInputHandler = ({ inputParam, data, disabled = false }) => { disabled={disabled} name={inputParam.name} options={inputParam.options} - onSelect={(newValue) => (data.inputs[inputParam.name] = newValue)} + onSelect={(newValue) => handleDataChange({ inputParam, newValue })} value={data.inputs[inputParam.name] ?? inputParam.default ?? 
'choose an option'} /> )} @@ -230,8 +242,9 @@ const DocStoreInputHandler = ({ inputParam, data, disabled = false }) => { freeSolo={inputParam.freeSolo} multiple={inputParam.type === 'asyncMultiOptions'} value={data.inputs[inputParam.name] ?? inputParam.default ?? 'choose an option'} - onSelect={(newValue) => (data.inputs[inputParam.name] = newValue)} + onSelect={(newValue) => handleDataChange({ inputParam, newValue })} onCreateNew={() => addAsyncOption(inputParam.name)} + fullWidth={true} />
    {inputParam.refresh && ( @@ -247,6 +260,9 @@ const DocStoreInputHandler = ({ inputParam, data, disabled = false }) => {
    )} + {inputParam.type === 'array' && ( + + )} {(data.name === 'cheerioWebScraper' || data.name === 'puppeteerWebScraper' || data.name === 'playwrightWebScraper') && @@ -296,7 +312,8 @@ const DocStoreInputHandler = ({ inputParam, data, disabled = false }) => { DocStoreInputHandler.propTypes = { inputParam: PropTypes.object, data: PropTypes.object, - disabled: PropTypes.bool + disabled: PropTypes.bool, + onNodeDataChange: PropTypes.func } export default DocStoreInputHandler diff --git a/packages/ui/src/views/docstore/DocumentLoaderListDialog.jsx b/packages/ui/src/views/docstore/DocumentLoaderListDialog.jsx index a8722ed35..8a1caa38d 100644 --- a/packages/ui/src/views/docstore/DocumentLoaderListDialog.jsx +++ b/packages/ui/src/views/docstore/DocumentLoaderListDialog.jsx @@ -153,7 +153,11 @@ const DocumentLoaderListDialog = ({ show, dialogProps, onCancel, onDocLoaderSele width: 50, height: 50, borderRadius: '50%', - backgroundColor: 'white' + backgroundColor: 'white', + flexShrink: 0, + display: 'flex', + alignItems: 'center', + justifyContent: 'center' }} > { const customization = useSelector((state) => state.customization) const navigate = useNavigate() const dispatch = useDispatch() + const { hasAssignedWorkspace } = useAuth() useNotifier() const { confirm } = useConfirm() const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) + const { error, setError } = useError() + const { hasPermission } = useAuth() const getSpecificDocumentStore = useApi(documentsApi.getSpecificDocumentStore) - const [error, setError] = useState(null) const [isLoading, setLoading] = useState(true) const [isBackdropLoading, setBackdropLoading] = useState(false) const [showDialog, setShowDialog] = useState(false) @@ -400,20 +405,17 @@ const DocumentStoreDetails = () => { useEffect(() => { if (getSpecificDocumentStore.data) { + const workspaceId = getSpecificDocumentStore.data.workspaceId + if 
(!hasAssignedWorkspace(workspaceId)) { + navigate('/unauthorized') + return + } setDocumentStore(getSpecificDocumentStore.data) - // total the chunks and chars } // eslint-disable-next-line react-hooks/exhaustive-deps }, [getSpecificDocumentStore.data]) - useEffect(() => { - if (getSpecificDocumentStore.error) { - setError(getSpecificDocumentStore.error) - } - // eslint-disable-next-line react-hooks/exhaustive-deps - }, [getSpecificDocumentStore.error]) - useEffect(() => { setLoading(getSpecificDocumentStore.loading) }, [getSpecificDocumentStore.loading]) @@ -427,7 +429,7 @@ const DocumentStoreDetails = () => { { onEdit={() => onEditClicked()} > {(documentStore?.status === 'STALE' || documentStore?.status === 'UPSERTING') && ( - + - + )} - } onClick={listLoaders} > Add Document Loader - + - - - - Preview & Process - - - - View & Edit Chunks - - - - Upsert Chunks - - - - View API - - - - - Delete - - -
    - + + +
    + + + + + + Preview & Process + + + + + + View & Edit Chunks + + + + + + Upsert Chunks + + + + + + View API + + + + + + + Delete + + + +
    +
    +
    ) diff --git a/packages/ui/src/views/docstore/ExpandedChunkDialog.jsx b/packages/ui/src/views/docstore/ExpandedChunkDialog.jsx index a0b85b10f..0d7976e00 100644 --- a/packages/ui/src/views/docstore/ExpandedChunkDialog.jsx +++ b/packages/ui/src/views/docstore/ExpandedChunkDialog.jsx @@ -11,6 +11,7 @@ import { IconEdit, IconTrash, IconX, IconLanguage } from '@tabler/icons-react' // Project imports import { CodeEditor } from '@/ui-component/editor/CodeEditor' +import { PermissionButton, PermissionIconButton } from '@/ui-component/button/RBACButtons' const ExpandedChunkDialog = ({ show, dialogProps, onCancel, onChunkEdit, onDeleteChunk, isReadOnly }) => { const portalElement = document.getElementById('portal') @@ -87,9 +88,16 @@ const ExpandedChunkDialog = ({ show, dialogProps, onCancel, onChunkEdit, onDelet #{selectedChunkNumber}. {selectedChunk.id} {!isEdit && !isReadOnly && ( - setIsEdit(true)} size='small' color='primary' title='Edit Chunk' sx={{ ml: 2 }}> + setIsEdit(true)} + size='small' + color='primary' + title='Edit Chunk' + sx={{ ml: 2 }} + > - + )} {isEdit && !isReadOnly && ( )} {isEdit && !isReadOnly && ( - + )} {!isEdit && !isReadOnly && ( - onDeleteChunk(selectedChunk)} size='small' color='error' @@ -116,7 +126,7 @@ const ExpandedChunkDialog = ({ show, dialogProps, onCancel, onChunkEdit, onDelet sx={{ ml: 1 }} > - + )} diff --git a/packages/ui/src/views/docstore/LoaderConfigPreviewChunks.jsx b/packages/ui/src/views/docstore/LoaderConfigPreviewChunks.jsx index 4554f393a..618d67c13 100644 --- a/packages/ui/src/views/docstore/LoaderConfigPreviewChunks.jsx +++ b/packages/ui/src/views/docstore/LoaderConfigPreviewChunks.jsx @@ -7,6 +7,7 @@ import ReactJson from 'flowise-react-json-view' // Hooks import useApi from '@/hooks/useApi' +import { useAuth } from '@/hooks/useAuth' // Material-UI import { Skeleton, Toolbar, Box, Button, Card, CardContent, Grid, OutlinedInput, Stack, Typography, TextField } from '@mui/material' @@ -31,9 +32,10 @@ import documentsApi 
from '@/api/documentstore' // Const import { baseURL, gridSpacing } from '@/store/constant' import { closeSnackbar as closeSnackbarAction, enqueueSnackbar as enqueueSnackbarAction } from '@/store/actions' +import { useError } from '@/store/context/ErrorContext' // Utils -import { initNode } from '@/utils/genericHelper' +import { initNode, showHideInputParams } from '@/utils/genericHelper' import useNotifier from '@/utils/useNotifier' const CardWrapper = styled(MainCard)(({ theme }) => ({ @@ -61,6 +63,8 @@ const LoaderConfigPreviewChunks = () => { const customization = useSelector((state) => state.customization) const navigate = useNavigate() const theme = useTheme() + const { error } = useError() + const { hasAssignedWorkspace } = useAuth() const getNodeDetailsApi = useApi(nodesApi.getSpecificNode) const getNodesByCategoryApi = useApi(nodesApi.getNodesByCategory) @@ -71,7 +75,6 @@ const LoaderConfigPreviewChunks = () => { const [selectedDocumentLoader, setSelectedDocumentLoader] = useState({}) const [loading, setLoading] = useState(false) - const [error, setError] = useState(null) const [loaderName, setLoaderName] = useState('') const [textSplitterNodes, setTextSplitterNodes] = useState([]) @@ -95,6 +98,24 @@ const LoaderConfigPreviewChunks = () => { const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) + const handleDocumentLoaderDataChange = ({ inputParam, newValue }) => { + setSelectedDocumentLoader((prevData) => { + const updatedData = { ...prevData } + updatedData.inputs[inputParam.name] = newValue + updatedData.inputParams = showHideInputParams(updatedData) + return updatedData + }) + } + + const handleTextSplitterDataChange = ({ inputParam, newValue }) => { + setSelectedTextSplitter((prevData) => { + const updatedData = { ...prevData } + updatedData.inputs[inputParam.name] = newValue + updatedData.inputParams = showHideInputParams(updatedData) + return 
updatedData + }) + } + const onSplitterChange = (name) => { const textSplitter = (textSplitterNodes ?? []).find((splitter) => splitter.name === name) if (textSplitter) { @@ -117,21 +138,27 @@ const LoaderConfigPreviewChunks = () => { const checkMandatoryFields = () => { let canSubmit = true + const missingFields = [] const inputParams = (selectedDocumentLoader.inputParams ?? []).filter((inputParam) => !inputParam.hidden) for (const inputParam of inputParams) { if (!inputParam.optional && (!selectedDocumentLoader.inputs[inputParam.name] || !selectedDocumentLoader.credential)) { - if (inputParam.type === 'credential' && !selectedDocumentLoader.credential) { + if ( + inputParam.type === 'credential' && + !selectedDocumentLoader.credential && + !selectedDocumentLoader.inputs['FLOWISE_CREDENTIAL_ID'] + ) { canSubmit = false - break + missingFields.push(inputParam.label || inputParam.name) } else if (inputParam.type !== 'credential' && !selectedDocumentLoader.inputs[inputParam.name]) { canSubmit = false - break + missingFields.push(inputParam.label || inputParam.name) } } } if (!canSubmit) { + const fieldsList = missingFields.join(', ') enqueueSnackbar({ - message: 'Please fill in all mandatory fields.', + message: `Please fill in the following mandatory fields: ${fieldsList}`, options: { key: new Date().getTime() + Math.random(), variant: 'warning', @@ -156,7 +183,7 @@ const LoaderConfigPreviewChunks = () => { const previewResp = await documentStoreApi.previewChunks(config) if (previewResp.data) { setTotalChunks(previewResp.data.totalChunks) - setDocumentChunks(previewResp.data.chunks) + setDocumentChunks(Array.isArray(previewResp.data.chunks) ? 
previewResp.data.chunks : []) setCurrentPreviewCount(previewResp.data.previewChunkCount) } setLoading(false) @@ -335,6 +362,11 @@ const LoaderConfigPreviewChunks = () => { useEffect(() => { if (getSpecificDocumentStoreApi.data) { + const workspaceId = getSpecificDocumentStoreApi.data.workspaceId + if (!hasAssignedWorkspace(workspaceId)) { + navigate('/unauthorized') + return + } if (getSpecificDocumentStoreApi.data?.loaders.length > 0) { const loader = getSpecificDocumentStoreApi.data.loaders.find((loader) => loader.id === docLoaderNodeName) if (loader) { @@ -347,30 +379,6 @@ const LoaderConfigPreviewChunks = () => { // eslint-disable-next-line react-hooks/exhaustive-deps }, [getSpecificDocumentStoreApi.data]) - useEffect(() => { - if (getSpecificDocumentStoreApi.error) { - setError(getSpecificDocumentStoreApi.error) - } - - // eslint-disable-next-line react-hooks/exhaustive-deps - }, [getSpecificDocumentStoreApi.error]) - - useEffect(() => { - if (getNodeDetailsApi.error) { - setError(getNodeDetailsApi.error) - } - - // eslint-disable-next-line react-hooks/exhaustive-deps - }, [getNodeDetailsApi.error]) - - useEffect(() => { - if (getNodesByCategoryApi.error) { - setError(getNodesByCategoryApi.error) - } - - // eslint-disable-next-line react-hooks/exhaustive-deps - }, [getNodesByCategoryApi.error]) - return ( <> @@ -462,13 +470,14 @@ const LoaderConfigPreviewChunks = () => { {selectedDocumentLoader && Object.keys(selectedDocumentLoader).length > 0 && - (selectedDocumentLoader.inputParams ?? []) - .filter((inputParam) => !inputParam.hidden) + showHideInputParams(selectedDocumentLoader) + .filter((inputParam) => !inputParam.hidden && inputParam.display !== false) .map((inputParam, index) => ( ))} {textSplitterNodes && textSplitterNodes.length > 0 && ( @@ -521,10 +530,15 @@ const LoaderConfigPreviewChunks = () => { )} {Object.keys(selectedTextSplitter).length > 0 && - (selectedTextSplitter.inputParams ?? 
[]) - .filter((inputParam) => !inputParam.hidden) + showHideInputParams(selectedTextSplitter) + .filter((inputParam) => !inputParam.hidden && inputParam.display !== false) .map((inputParam, index) => ( - + ))}
    diff --git a/packages/ui/src/views/docstore/ShowStoredChunks.jsx b/packages/ui/src/views/docstore/ShowStoredChunks.jsx index 07d477e8a..e02ecadb4 100644 --- a/packages/ui/src/views/docstore/ShowStoredChunks.jsx +++ b/packages/ui/src/views/docstore/ShowStoredChunks.jsx @@ -16,6 +16,7 @@ import { BackdropLoader } from '@/ui-component/loading/BackdropLoader' import ConfirmDialog from '@/ui-component/dialog/ConfirmDialog' import ExpandedChunkDialog from './ExpandedChunkDialog' import ViewHeader from '@/layout/MainLayout/ViewHeader' +import ErrorBoundary from '@/ErrorBoundary' // API import documentsApi from '@/api/documentstore' @@ -24,9 +25,11 @@ import documentsApi from '@/api/documentstore' import useApi from '@/hooks/useApi' import useConfirm from '@/hooks/useConfirm' import useNotifier from '@/utils/useNotifier' +import { useAuth } from '@/hooks/useAuth' // store import { closeSnackbar as closeSnackbarAction, enqueueSnackbar as enqueueSnackbarAction } from '@/store/actions' +import { useError } from '@/store/context/ErrorContext' const CardWrapper = styled(MainCard)(({ theme }) => ({ background: theme.palette.card.main, @@ -53,6 +56,8 @@ const ShowStoredChunks = () => { const dispatch = useDispatch() const theme = useTheme() const { confirm } = useConfirm() + const { error } = useError() + const { hasAssignedWorkspace } = useAuth() useNotifier() const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) @@ -196,6 +201,11 @@ const ShowStoredChunks = () => { useEffect(() => { if (getChunksApi.data) { const data = getChunksApi.data + const workspaceId = data.workspaceId + if (!hasAssignedWorkspace(workspaceId)) { + navigate('/unauthorized') + return + } setTotalChunks(data.count) setDocumentChunks(data.chunks) setLoading(false) @@ -217,156 +227,160 @@ const ShowStoredChunks = () => { return ( <> - - navigate(-1)} - > -
    - {fileNames.length > 0 && ( - - {fileNames.map((fileName, index) => ( -
    + ) : ( + + navigate(-1)} + > +
    + {fileNames.length > 0 && ( + + {fileNames.map((fileName, index) => ( +
    + {fileName} +
    + ))} +
    + )} +
    +
    + changePage(currentPage - 1)} + style={{ marginRight: 10 }} + variant='outlined' + disabled={currentPage === 1} > - {fileName} -
    - ))} - - )} -
    -
    - changePage(currentPage - 1)} - style={{ marginRight: 10 }} - variant='outlined' - disabled={currentPage === 1} - > - - - Showing {Math.min(start, totalChunks)}-{end} of {totalChunks} chunks - changePage(currentPage + 1)} - style={{ marginLeft: 10 }} - variant='outlined' - disabled={end >= totalChunks} - > - = totalChunks - ? '#616161' - : 'white' - : end >= totalChunks - ? '#e0e0e0' - : 'black' - } - /> - -
    -
    - - {getChunksApi.data?.characters?.toLocaleString()} characters + + + Showing {Math.min(start, totalChunks)}-{end} of {totalChunks} chunks + changePage(currentPage + 1)} + style={{ marginLeft: 10 }} + variant='outlined' + disabled={end >= totalChunks} + > + = totalChunks + ? '#616161' + : 'white' + : end >= totalChunks + ? '#e0e0e0' + : 'black' + } + /> + +
    +
    + + {getChunksApi.data?.characters?.toLocaleString()} characters +
    -
    -
    - - {!documentChunks.length && ( -
    - - chunks_emptySVG - -
    No Chunks
    -
    - )} - {documentChunks.length > 0 && - documentChunks.map((row, index) => ( - - chunkSelected(row.id)} - sx={{ border: 1, borderColor: theme.palette.grey[900] + 25, borderRadius: 2 }} - > - - - - {`#${row.chunkNo}. Characters: ${row.pageContent.length}`} - - - {row.pageContent} - - - - - - - ))} -
    -
    -
    +
    + + {!documentChunks.length && ( +
    + + chunks_emptySVG + +
    No Chunks
    +
    + )} + {documentChunks.length > 0 && + documentChunks.map((row, index) => ( + + chunkSelected(row.id)} + sx={{ border: 1, borderColor: theme.palette.grey[900] + 25, borderRadius: 2 }} + > + + + + {`#${row.chunkNo}. Characters: ${row.pageContent.length}`} + + + {row.pageContent} + + + + + + + ))} +
    +
    + + )} { const navigate = useNavigate() const dispatch = useDispatch() + const { hasAssignedWorkspace } = useAuth() useNotifier() + const { error, setError } = useError() const customization = useSelector((state) => state.customization) const { storeId, docId } = useParams() @@ -62,9 +66,7 @@ const VectorStoreConfigure = () => { const getVectorStoreNodeDetailsApi = useApi(nodesApi.getSpecificNode) const getRecordManagerNodeDetailsApi = useApi(nodesApi.getSpecificNode) - const [error, setError] = useState(null) const [loading, setLoading] = useState(true) - const [documentStore, setDocumentStore] = useState({}) const [dialogProps, setDialogProps] = useState({}) @@ -87,6 +89,33 @@ const VectorStoreConfigure = () => { const [showUpsertHistoryDetailsDialog, setShowUpsertHistoryDetailsDialog] = useState(false) const [upsertDetailsDialogProps, setUpsertDetailsDialogProps] = useState({}) + const handleEmbeddingsProviderDataChange = ({ inputParam, newValue }) => { + setSelectedEmbeddingsProvider((prevData) => { + const updatedData = { ...prevData } + updatedData.inputs[inputParam.name] = newValue + updatedData.inputParams = showHideInputParams(updatedData) + return updatedData + }) + } + + const handleVectorStoreProviderDataChange = ({ inputParam, newValue }) => { + setSelectedVectorStoreProvider((prevData) => { + const updatedData = { ...prevData } + updatedData.inputs[inputParam.name] = newValue + updatedData.inputParams = showHideInputParams(updatedData) + return updatedData + }) + } + + const handleRecordManagerProviderDataChange = ({ inputParam, newValue }) => { + setSelectedRecordManagerProvider((prevData) => { + const updatedData = { ...prevData } + updatedData.inputs[inputParam.name] = newValue + updatedData.inputParams = showHideInputParams(updatedData) + return updatedData + }) + } + const onEmbeddingsSelected = (component) => { const nodeData = cloneDeep(initNode(component, uuidv4())) if (!showEmbeddingsListDialog && documentStore.embeddingConfig) { @@ 
-377,6 +406,10 @@ const VectorStoreConfigure = () => { useEffect(() => { if (getSpecificDocumentStoreApi.data) { const docStore = getSpecificDocumentStoreApi.data + if (!hasAssignedWorkspace(docStore.workspaceId)) { + navigate('/unauthorized') + return + } setDocumentStore(docStore) if (docStore.embeddingConfig) { getEmbeddingNodeDetailsApi.request(docStore.embeddingConfig.name) @@ -593,14 +626,17 @@ const VectorStoreConfigure = () => { {selectedEmbeddingsProvider && Object.keys(selectedEmbeddingsProvider).length > 0 && - (selectedEmbeddingsProvider.inputParams ?? []) - .filter((inputParam) => !inputParam.hidden) + showHideInputParams(selectedEmbeddingsProvider) + .filter( + (inputParam) => !inputParam.hidden && inputParam.display !== false + ) .map((inputParam, index) => ( ))}
    @@ -708,14 +744,17 @@ const VectorStoreConfigure = () => { {selectedVectorStoreProvider && Object.keys(selectedVectorStoreProvider).length > 0 && - (selectedVectorStoreProvider.inputParams ?? []) - .filter((inputParam) => !inputParam.hidden) + showHideInputParams(selectedVectorStoreProvider) + .filter( + (inputParam) => !inputParam.hidden && inputParam.display !== false + ) .map((inputParam, index) => ( ))}
    @@ -831,17 +870,18 @@ const VectorStoreConfigure = () => { {selectedRecordManagerProvider && Object.keys(selectedRecordManagerProvider).length > 0 && - (selectedRecordManagerProvider.inputParams ?? []) - .filter((inputParam) => !inputParam.hidden) + showHideInputParams(selectedRecordManagerProvider) + .filter( + (inputParam) => !inputParam.hidden && inputParam.display !== false + ) .map((inputParam, index) => ( - <> - - + ))}
    diff --git a/packages/ui/src/views/docstore/VectorStoreQuery.jsx b/packages/ui/src/views/docstore/VectorStoreQuery.jsx index 100683912..f0650ae41 100644 --- a/packages/ui/src/views/docstore/VectorStoreQuery.jsx +++ b/packages/ui/src/views/docstore/VectorStoreQuery.jsx @@ -20,6 +20,7 @@ import ConfirmDialog from '@/ui-component/dialog/ConfirmDialog' import ExpandedChunkDialog from './ExpandedChunkDialog' import ViewHeader from '@/layout/MainLayout/ViewHeader' import DocStoreInputHandler from '@/views/docstore/DocStoreInputHandler' +import { PermissionButton } from '@/ui-component/button/RBACButtons' // API import documentsApi from '@/api/documentstore' @@ -27,9 +28,10 @@ import nodesApi from '@/api/nodes' // Hooks import useApi from '@/hooks/useApi' +import { useAuth } from '@/hooks/useAuth' import useNotifier from '@/utils/useNotifier' import { baseURL } from '@/store/constant' -import { initNode } from '@/utils/genericHelper' +import { initNode, showHideInputParams } from '@/utils/genericHelper' import { closeSnackbar as closeSnackbarAction, enqueueSnackbar as enqueueSnackbarAction } from '@/store/actions' const CardWrapper = styled(MainCard)(({ theme }) => ({ @@ -57,6 +59,7 @@ const VectorStoreQuery = () => { const theme = useTheme() const dispatch = useDispatch() const inputRef = useRef(null) + const { hasAssignedWorkspace } = useAuth() useNotifier() @@ -81,6 +84,15 @@ const VectorStoreQuery = () => { const getVectorStoreNodeDetailsApi = useApi(nodesApi.getSpecificNode) const [selectedVectorStoreProvider, setSelectedVectorStoreProvider] = useState({}) + const handleVectorStoreProviderDataChange = ({ inputParam, newValue }) => { + setSelectedVectorStoreProvider((prevData) => { + const updatedData = { ...prevData } + updatedData.inputs[inputParam.name] = newValue + updatedData.inputParams = showHideInputParams(updatedData) + return updatedData + }) + } + const chunkSelected = (chunkId, selectedChunkNumber) => { const selectedChunk = documentChunks.find((chunk) 
=> chunk.id === chunkId) const dialogProps = { @@ -227,6 +239,10 @@ const VectorStoreQuery = () => { useEffect(() => { if (getSpecificDocumentStoreApi.data) { + if (!hasAssignedWorkspace(getSpecificDocumentStoreApi.data.workspaceId)) { + navigate('/unauthorized') + return + } setDocumentStore(getSpecificDocumentStoreApi.data) const vectorStoreConfig = getSpecificDocumentStoreApi.data.vectorStoreConfig if (vectorStoreConfig) { @@ -249,7 +265,8 @@ const VectorStoreQuery = () => { description='Retrieval Playground - Test your vector store retrieval settings' onBack={() => navigate(-1)} > - +
    @@ -346,14 +363,15 @@ const VectorStoreQuery = () => { {selectedVectorStoreProvider && Object.keys(selectedVectorStoreProvider).length > 0 && - (selectedVectorStoreProvider.inputParams ?? []) - .filter((inputParam) => !inputParam.hidden) + showHideInputParams(selectedVectorStoreProvider) + .filter((inputParam) => !inputParam.hidden && inputParam.display !== false) .map((inputParam, index) => ( ))}
    diff --git a/packages/ui/src/views/docstore/index.jsx b/packages/ui/src/views/docstore/index.jsx index b399af873..60e45aac9 100644 --- a/packages/ui/src/views/docstore/index.jsx +++ b/packages/ui/src/views/docstore/index.jsx @@ -1,33 +1,19 @@ -import { useEffect, useState } from 'react' +import React, { useEffect, useState } from 'react' import { useNavigate } from 'react-router-dom' -import { useSelector } from 'react-redux' // material-ui -import { - Box, - Paper, - Skeleton, - Stack, - Table, - TableBody, - TableCell, - TableContainer, - TableHead, - TableRow, - ToggleButton, - ToggleButtonGroup, - Typography -} from '@mui/material' +import { Box, Stack, ToggleButton, ToggleButtonGroup } from '@mui/material' import { useTheme } from '@mui/material/styles' // project imports -import MainCard from '@/ui-component/cards/MainCard' -import DocumentStoreCard from '@/ui-component/cards/DocumentStoreCard' -import { StyledButton } from '@/ui-component/button/StyledButton' -import AddDocStoreDialog from '@/views/docstore/AddDocStoreDialog' import ErrorBoundary from '@/ErrorBoundary' +import { useError } from '@/store/context/ErrorContext' +import MainCard from '@/ui-component/cards/MainCard' +import TablePagination, { DEFAULT_ITEMS_PER_PAGE } from '@/ui-component/pagination/TablePagination' +import DocumentStoreCard from '@/ui-component/cards/DocumentStoreCard' +import AddDocStoreDialog from '@/views/docstore/AddDocStoreDialog' import ViewHeader from '@/layout/MainLayout/ViewHeader' -import DocumentStoreStatus from '@/views/docstore/DocumentStoreStatus' +import { StyledPermissionButton } from '@/ui-component/button/RBACButtons' // API import useApi from '@/hooks/useApi' @@ -39,17 +25,17 @@ import doc_store_empty from '@/assets/images/doc_store_empty.svg' // const import { baseURL, gridSpacing } from '@/store/constant' +import { DocumentStoreTable } from '@/ui-component/table/DocumentStoreTable' // ==============================|| DOCUMENTS 
||============================== // const Documents = () => { const theme = useTheme() - const customization = useSelector((state) => state.customization) const navigate = useNavigate() const getAllDocumentStores = useApi(documentsApi.getAllDocumentStores) + const { error } = useError() - const [error, setError] = useState(null) const [isLoading, setLoading] = useState(true) const [images, setImages] = useState({}) const [search, setSearch] = useState('') @@ -65,7 +51,9 @@ const Documents = () => { } function filterDocStores(data) { - return data.name.toLowerCase().indexOf(search.toLowerCase()) > -1 + return ( + data.name.toLowerCase().indexOf(search.toLowerCase()) > -1 || data.description.toLowerCase().indexOf(search.toLowerCase()) > -1 + ) } const onSearchChange = (event) => { @@ -89,41 +77,61 @@ const Documents = () => { const onConfirm = () => { setShowDialog(false) - getAllDocumentStores.request() + applyFilters(currentPage, pageLimit) } useEffect(() => { - getAllDocumentStores.request() + applyFilters(currentPage, pageLimit) // eslint-disable-next-line react-hooks/exhaustive-deps }, []) + /* Table Pagination */ + const [currentPage, setCurrentPage] = useState(1) + const [pageLimit, setPageLimit] = useState(DEFAULT_ITEMS_PER_PAGE) + const [total, setTotal] = useState(0) + const onChange = (page, pageLimit) => { + setCurrentPage(page) + setPageLimit(pageLimit) + applyFilters(page, pageLimit) + } + + const applyFilters = (page, limit) => { + setLoading(true) + const params = { + page: page || currentPage, + limit: limit || pageLimit + } + getAllDocumentStores.request(params) + } + useEffect(() => { if (getAllDocumentStores.data) { try { - const docStores = getAllDocumentStores.data - if (!Array.isArray(docStores)) return + const { data, total } = getAllDocumentStores.data + if (!Array.isArray(data)) return const loaderImages = {} - for (let i = 0; i < docStores.length; i += 1) { - const loaders = docStores[i].loaders ?? 
[] + for (let i = 0; i < data.length; i += 1) { + const loaders = data[i].loaders ?? [] let totalChunks = 0 let totalChars = 0 - loaderImages[docStores[i].id] = [] + loaderImages[data[i].id] = [] for (let j = 0; j < loaders.length; j += 1) { const imageSrc = `${baseURL}/api/v1/node-icon/${loaders[j].loaderId}` - if (!loaderImages[docStores[i].id].includes(imageSrc)) { - loaderImages[docStores[i].id].push(imageSrc) + if (!loaderImages[data[i].id].includes(imageSrc)) { + loaderImages[data[i].id].push(imageSrc) } totalChunks += loaders[j]?.totalChunks ?? 0 totalChars += loaders[j]?.totalChars ?? 0 } - docStores[i].totalDocs = loaders?.length ?? 0 - docStores[i].totalChunks = totalChunks - docStores[i].totalChars = totalChars + data[i].totalDocs = loaders?.length ?? 0 + data[i].totalChunks = totalChunks + data[i].totalChars = totalChars } - setDocStores(docStores) + setDocStores(data) + setTotal(total) setImages(loaderImages) } catch (e) { console.error(e) @@ -135,9 +143,7 @@ const Documents = () => { setLoading(getAllDocumentStores.loading) }, [getAllDocumentStores.loading]) - useEffect(() => { - setError(getAllDocumentStores.error) - }, [getAllDocumentStores.error]) + const hasDocStores = docStores && docStores.length > 0 return ( @@ -147,44 +153,47 @@ const Documents = () => { - - - - - - - - - + + + + + + + )} + { id='btn_createVariable' > Add New - + - {!view || view === 'card' ? ( - <> - {isLoading && !docStores ? ( - - - - - - ) : ( - - {docStores?.filter(filterDocStores).map((data, index) => ( - goToDocumentStore(data.id)} - /> - ))} - - )} - - ) : ( - - - - -   - Name - Description - Connected flows - Total characters - Total chunks - Loader types - - - - {docStores?.filter(filterDocStores).map((data, index) => ( - goToDocumentStore(data.id)} - hover - key={index} - sx={{ cursor: 'pointer', '&:last-child td, &:last-child th': { border: 0 } }} - > - - - - - - {data.name} - - - - - {data?.description} - - - {data.whereUsed?.length ?? 
0} - {data.totalChars} - {data.totalChunks} - - {images[data.id] && ( - - {images[data.id].slice(0, images.length > 3 ? 3 : images.length).map((img) => ( - - - - ))} - {images.length > 3 && ( - - + {images.length - 3} More - - )} - - )} - - - ))} - -
    -
    - )} - {!isLoading && (!docStores || docStores.length === 0) && ( + {!hasDocStores ? ( {
    No Document Stores Created Yet
    + ) : ( + + {!view || view === 'card' ? ( + + {docStores?.filter(filterDocStores).map((data, index) => ( + goToDocumentStore(data.id)} + /> + ))} + + ) : ( + goToDocumentStore(row.id)} + /> + )} + {/* Pagination and Page Size Controls */} + + )}
    )} diff --git a/packages/ui/src/views/evaluations/ChartLatency.jsx b/packages/ui/src/views/evaluations/ChartLatency.jsx new file mode 100644 index 000000000..762ac533a --- /dev/null +++ b/packages/ui/src/views/evaluations/ChartLatency.jsx @@ -0,0 +1,59 @@ +import { CartesianGrid, Line, LineChart, ResponsiveContainer, XAxis, YAxis, Tooltip } from 'recharts' +import PropTypes from 'prop-types' + +const empty = [] + +const COLORS = ['#00C49F', '#0088FE', '#82ca9d', '#113333', '#FF3322'] + +export const ChartLatency = ({ data, flowNames, onClick }) => { + return ( + + + + + + + {flowNames.map((key, index) => ( + + ))} + + + ) +} + +ChartLatency.propTypes = { + data: PropTypes.array, + flowNames: PropTypes.array, + onClick: PropTypes.func +} diff --git a/packages/ui/src/views/evaluations/ChartPassPrnt.jsx b/packages/ui/src/views/evaluations/ChartPassPrnt.jsx new file mode 100644 index 000000000..88c097ecc --- /dev/null +++ b/packages/ui/src/views/evaluations/ChartPassPrnt.jsx @@ -0,0 +1,37 @@ +import { ResponsiveContainer, PieChart, Pie, Cell, Legend } from 'recharts' +import PropTypes from 'prop-types' + +// success, failure, error +const COLORS = ['#2ecc71', '#e74c3c', '#f39c12'] +const RADIAN = Math.PI / 180 + +const renderCustomizedLabel = ({ cx, cy, midAngle, innerRadius, outerRadius, percent }) => { + const radius = innerRadius + (outerRadius - innerRadius) * 0.35 + const x = cx + radius * Math.cos(-midAngle * RADIAN) + const y = cy + radius * Math.sin(-midAngle * RADIAN) + + return ( + cx ? 
'start' : 'end'} dominantBaseline='central' fontSize='11'> + {`${(percent * 100).toFixed(2)}%`} + + ) +} + +export const ChartPassPrnt = ({ data }) => { + return ( + + + + + + + + + + + ) +} + +ChartPassPrnt.propTypes = { + data: PropTypes.array +} diff --git a/packages/ui/src/views/evaluations/ChartTokens.jsx b/packages/ui/src/views/evaluations/ChartTokens.jsx new file mode 100644 index 000000000..ce8e09103 --- /dev/null +++ b/packages/ui/src/views/evaluations/ChartTokens.jsx @@ -0,0 +1,57 @@ +import { CartesianGrid, ResponsiveContainer, XAxis, YAxis, Tooltip, Bar, BarChart } from 'recharts' +import PropTypes from 'prop-types' + +export const ChartTokens = ({ data, flowNames }) => { + return ( + + + + + + + {flowNames.map((name, index) => ( + <> + + + + ))} + + + ) +} + +ChartTokens.propTypes = { + data: PropTypes.array, + flowNames: PropTypes.array +} diff --git a/packages/ui/src/views/evaluations/CreateEvaluationDialog.jsx b/packages/ui/src/views/evaluations/CreateEvaluationDialog.jsx new file mode 100644 index 000000000..1eb8df654 --- /dev/null +++ b/packages/ui/src/views/evaluations/CreateEvaluationDialog.jsx @@ -0,0 +1,707 @@ +import { createPortal } from 'react-dom' +import PropTypes from 'prop-types' +import { useState, useEffect } from 'react' + +// Material +import { + Dialog, + DialogActions, + DialogContent, + DialogTitle, + Box, + Typography, + Chip, + OutlinedInput, + Divider, + Stack, + DialogContentText, + Button, + Stepper, + Step, + Switch, + StepLabel, + IconButton, + FormControlLabel, + Checkbox +} from '@mui/material' +import { useTheme } from '@mui/material/styles' + +// Project imports +import { StyledButton } from '@/ui-component/button/StyledButton' +import ConfirmDialog from '@/ui-component/dialog/ConfirmDialog' +import { Dropdown } from '@/ui-component/dropdown/Dropdown' +import CredentialInputHandler from '@/views/canvas/CredentialInputHandler' +import { TooltipWithParser } from '@/ui-component/tooltip/TooltipWithParser' +import { 
MultiDropdown } from '@/ui-component/dropdown/MultiDropdown' + +// Icons +import { IconArrowLeft, IconAlertTriangle, IconTestPipe2 } from '@tabler/icons-react' + +// API +import chatflowsApi from '@/api/chatflows' +import useApi from '@/hooks/useApi' +import datasetsApi from '@/api/dataset' +import evaluatorsApi from '@/api/evaluators' +import nodesApi from '@/api/nodes' +import assistantsApi from '@/api/assistants' + +// utils +import useNotifier from '@/utils/useNotifier' + +// const +import { evaluators as evaluatorsOptions } from '../evaluators/evaluatorConstant' + +const steps = ['Datasets', 'Evaluators', 'LLM Graded Metrics'] + +const CreateEvaluationDialog = ({ show, dialogProps, onCancel, onConfirm }) => { + const portalElement = document.getElementById('portal') + const theme = useTheme() + useNotifier() + + const getAllChatflowsApi = useApi(chatflowsApi.getAllChatflows) + const getAllAgentflowsApi = useApi(chatflowsApi.getAllAgentflows) + + const getAllDatasetsApi = useApi(datasetsApi.getAllDatasets) + const getAllEvaluatorsApi = useApi(evaluatorsApi.getAllEvaluators) + const getNodesByCategoryApi = useApi(nodesApi.getNodesByCategory) + const getModelsApi = useApi(nodesApi.executeNodeLoadMethod) + const getAssistantsApi = useApi(assistantsApi.getAllAssistants) + + const [chatflow, setChatflow] = useState([]) + const [dataset, setDataset] = useState('') + const [datasetAsOneConversation, setDatasetAsOneConversation] = useState(false) + const [flowTypes, setFlowTypes] = useState([]) + + const [flows, setFlows] = useState([]) + const [datasets, setDatasets] = useState([]) + const [credentialId, setCredentialId] = useState('') + const [evaluationName, setEvaluationName] = useState('') + const [availableSimpleEvaluators, setAvailableSimpleEvaluators] = useState([]) + const [availableLLMEvaluators, setAvailableLLMEvaluators] = useState([]) + const [selectedSimpleEvaluators, setSelectedSimpleEvaluators] = useState([]) + const [selectedLLMEvaluators, 
setSelectedLLMEvaluators] = useState([]) + + const [activeStep, setActiveStep] = useState(0) + const [useLLM, setUseLLM] = useState(false) + + const [validationFailed, setValidationFailed] = useState(false) + + const [chatLLMs, setChatLLMs] = useState([]) + const [selectedLLM, setSelectedLLM] = useState('no_grading') + const [availableModels, setAvailableModels] = useState([]) + const [selectedModel, setSelectedModel] = useState('') + + useEffect(() => { + if (dialogProps.type === 'NEW' && dialogProps.data) { + const evaluation = dialogProps.data + const evalChatFlows = [] + JSON.parse(evaluation.chatflowId).map((f) => { + evalChatFlows.push(f) + }) + setChatflow(evalChatFlows) + setDataset(evaluation.datasetId) + setCredentialId('') + setSelectedModel('') + setSelectedLLM('no_grading') + setEvaluationName('') + setSelectedSimpleEvaluators([]) + setSelectedLLMEvaluators([]) + setActiveStep(0) + setUseLLM(false) + setCredentialId('') + } else { + resetData() + } + + return () => { + resetData() + } + }, [dialogProps]) + + const resetData = () => { + setDataset('') + setCredentialId('') + setEvaluationName('') + setSelectedSimpleEvaluators([]) + setSelectedLLMEvaluators([]) + setActiveStep(0) + setChatflow([]) + setSelectedModel('') + setSelectedLLM('no_grading') + setUseLLM(false) + setDatasetAsOneConversation(false) + } + + const validate = () => { + if (activeStep === 0) { + return evaluationName && dataset && chatflow.length > 0 + } else if (activeStep === 1) { + return true + } else if (activeStep === 2) { + if (useLLM) { + return credentialId && selectedLLM && selectedModel + } else { + return true + } + } + return false + } + + const goNext = async (prevActiveStep) => { + const isValid = validate() + setValidationFailed(!isValid) + if (isValid) { + if (prevActiveStep === steps.length - 1) { + createNewEvaluation() + } else { + setActiveStep((prevActiveStep) => prevActiveStep + 1) + } + } + } + + const goPrev = async () => { + setActiveStep((prevActiveStep) => 
prevActiveStep - 1) + } + + const createNewEvaluation = async () => { + const selectedChatflows = JSON.parse(chatflow) + const selectedChatflowNames = [] + for (let i = 0; i < selectedChatflows.length; i += 1) { + selectedChatflowNames.push(flows.find((f) => f.name === selectedChatflows[i])?.label) + } + const selectedChatflowTypes = [] + for (let i = 0; i < selectedChatflows.length; i += 1) { + selectedChatflowTypes.push(flows.find((f) => f.name === selectedChatflows[i])?.type) + } + const chatflowName = JSON.stringify(selectedChatflowNames) + const datasetName = datasets.find((f) => f.name === dataset)?.label + const obj = { + name: evaluationName, + evaluationType: credentialId ? 'llm' : 'benchmarking', + credentialId: credentialId, + datasetId: dataset, + datasetName: datasetName, + chatflowId: chatflow, + chatflowName: chatflowName, + chatflowType: JSON.stringify(selectedChatflowTypes), + selectedSimpleEvaluators: selectedSimpleEvaluators, + selectedLLMEvaluators: selectedLLMEvaluators, + model: selectedModel, + llm: selectedLLM, + datasetAsOneConversation: datasetAsOneConversation + } + onConfirm(obj) + } + + const disableButton = () => { + if (activeStep === 0) { + return !evaluationName || !dataset || chatflow.length === 0 + } else if (activeStep === 2) { + if (useLLM) { + if (!selectedModel || !selectedLLM || selectedLLMEvaluators.length === 0) { + return true + } + if (chatLLMs.find((llm) => llm.name === selectedLLM)?.credential && !credentialId) { + return true + } + } + return false + } + } + + const EvalWizard = () => { + return ( + + + {steps.map((label) => ( + + {label} + + ))} + + + ) + } + + useEffect(() => { + getNodesByCategoryApi.request('Chat Models') + if (flows.length === 0) { + getAllChatflowsApi.request() + getAssistantsApi.request('CUSTOM') + getAllAgentflowsApi.request('AGENTFLOW') + } + if (datasets.length === 0) { + getAllDatasetsApi.request() + } + getAllEvaluatorsApi.request() + // eslint-disable-next-line react-hooks/exhaustive-deps 
+ }, []) + + useEffect(() => { + if (getAllAgentflowsApi.data && getAllChatflowsApi.data && getAssistantsApi.data) { + try { + const agentFlows = populateFlowNames(getAllAgentflowsApi.data, 'Agentflow v2') + const chatFlows = populateFlowNames(getAllChatflowsApi.data, 'Chatflow') + const assistants = populateAssistants(getAssistantsApi.data) + setFlows([...agentFlows, ...chatFlows, ...assistants]) + setFlowTypes(['Agentflow v2', 'Chatflow', 'Custom Assistant']) + } catch (e) { + console.error(e) + } + } + }, [getAllAgentflowsApi.data, getAllChatflowsApi.data, getAssistantsApi.data]) + + useEffect(() => { + if (getNodesByCategoryApi.data) { + const llmNodes = [] + try { + const nodes = getNodesByCategoryApi.data + llmNodes.push({ + label: 'No Grading', + name: 'no_grading', + credential: {} + }) + for (let i = 0; i < nodes.length; i += 1) { + const node = nodes[i] + if (!node.tags || !node.tags.indexOf('[LlamaIndex]') === -1) { + llmNodes.push({ + label: node.label, + name: node.name, + credential: node.credential + }) + } + } + setChatLLMs(llmNodes) + setSelectedLLM('no_grading') + setSelectedModel('') + setCredentialId('') + } catch (e) { + console.error(e) + } + } + }, [getNodesByCategoryApi.data]) + + useEffect(() => { + if (getModelsApi.data) { + try { + const models = getModelsApi.data + setAvailableModels(models) + } catch (e) { + console.error(e) + } + } + }, [getModelsApi.data]) + + useEffect(() => { + if (getAllEvaluatorsApi.data) { + try { + const simpleEvaluators = [] + const llmEvaluators = [] + // iterate over the evaluators and add a new property label that is the name of the evaluator + // also set the name to the id + for (let i = 0; i < getAllEvaluatorsApi.data.length; i += 1) { + const evaluator = getAllEvaluatorsApi.data[i] + evaluator.label = evaluator.name + evaluator.name = evaluator.id + if (evaluator.type === 'llm') { + llmEvaluators.push(evaluator) + } else { + simpleEvaluators.push(evaluator) + } + } + 
setAvailableSimpleEvaluators(simpleEvaluators) + setAvailableLLMEvaluators(llmEvaluators) + } catch (e) { + console.error(e) + } + } + }, [getAllEvaluatorsApi.data]) + + useEffect(() => { + if (getAllDatasetsApi.data) { + try { + const datasets = getAllDatasetsApi.data + let dsNames = [] + for (let i = 0; i < datasets.length; i += 1) { + const ds = datasets[i] + dsNames.push({ + label: ds.name, + name: ds.id + }) + } + setDatasets(dsNames) + } catch (e) { + console.error(e) + } + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getAllDatasetsApi.data]) + + const selectLLMForEval = (llm) => { + setUseLLM(llm !== 'no_grading') + setSelectedLLM(llm) + setSelectedModel('') + setCredentialId('') + if (llm !== 'no_grading') getModelsApi.request(llm, { loadMethod: 'listModels' }) + } + + const onChangeFlowType = (flowType) => { + const selected = flowType.target.checked + const flowTypeValue = flowType.target.value + if (selected) { + setFlowTypes([...flowTypes, flowTypeValue]) + } else { + setFlowTypes(flowTypes.filter((f) => f !== flowTypeValue)) + } + } + + const populateFlowNames = (data, type) => { + let flowNames = [] + for (let i = 0; i < data.length; i += 1) { + const flow = data[i] + flowNames.push({ + label: flow.name, + name: flow.id, + type: type, + description: type + }) + } + return flowNames + } + + const populateAssistants = (assistants) => { + let assistantNames = [] + for (let i = 0; i < assistants.length; i += 1) { + const assistant = assistants[i] + assistantNames.push({ + label: JSON.parse(assistant.details).name || '', + name: assistant.id, + type: 'Custom Assistant', + description: 'Custom Assistant' + }) + } + return assistantNames + } + + const component = show ? ( + + +
    + + {'Start New Evaluation'} +
    +
    + + + + {validationFailed && ( +
    +
    + +
    + Fill all the mandatory fields +
    + )} + + + {activeStep === 0 && ( + <> + + Select dataset to be tested on flows + + + Uses the input column from the dataset to execute selected + Chatflow(s), and compares the results with the output column. + + The following metrics will be computed: + + {evaluatorsOptions + .filter((opt) => opt.type === 'numeric' && opt.name !== 'chain') + .map((evaluator, index) => ( + + ))} + + + )} + {activeStep === 1 && ( + <> + + Unit Test your flows by adding custom evaluators + + + Post execution, all the chosen evaluators will be executed on the results. Each evaluator will grade the + results based on the criteria defined and return a pass/fail indicator. + + + + + )} + {activeStep === 2 && ( + <> + + Grade flows using an LLM + + + Post execution, grades the answers by using an LLM. Used to generate comparative scores or reasoning or + other custom defined criteria. + + + )} + + {activeStep === 0 && ( + <> + + + Name * + + + setEvaluationName(e.target.value)} + /> + + + + Dataset to use * + + setDataset(newValue)} + value={dataset} + /> + + + + Treat all dataset rows as one conversation ? + + } + value={datasetAsOneConversation} + onChange={() => setDatasetAsOneConversation(!datasetAsOneConversation)} + /> + + +
    + + Select your flows to Evaluate +  * + + + {' '} + Chatflows + {' '} + Agentflows (v2) + {' '} + Custom Assistants + +
    + flowTypes.includes(f.type))} + onSelect={(newValue) => setChatflow(newValue)} + value={chatflow ?? chatflow ?? 'choose an option'} + /> +
    + + )} + {activeStep === 1 && ( + <> + + Select the Evaluators + setSelectedSimpleEvaluators(newValue)} + value={selectedSimpleEvaluators} + /> + + + )} + {activeStep === 2 && ( + <> + + + Use an LLM to grade the results ? + + selectLLMForEval(newValue)} + /> + + {useLLM && availableModels.length > 0 && ( + + Select Model + setSelectedModel(newValue)} + /> + + )} + {useLLM && availableModels.length === 0 && ( + + Enter the Model Name + setSelectedModel(e.target.value)} + /> + + )} + {useLLM && chatLLMs.find((llm) => llm.name === selectedLLM)?.credential && ( + + Select Credential + llm.name === selectedLLM)?.credential.credentialNames[0] + ] + }} + onSelect={(newValue) => { + setCredentialId(newValue) + }} + /> + + )} + {useLLM && ( + + Select Evaluators + setSelectedLLMEvaluators(newValue)} + value={selectedLLMEvaluators} + /> + + )} + + )} + +
    +
    + + {activeStep > 0 && ( + goPrev(activeStep)}> + + + )} +
    + {activeStep === 1 && selectedSimpleEvaluators.length === 0 && ( + + )} + {activeStep === 1 && selectedSimpleEvaluators.length > 0 && ( + + )} + {activeStep !== 1 && ( + goNext(activeStep)} + > + {activeStep === steps.length - 1 ? 'Start Evaluation' : 'Next'} + + )} +
    + +
    + ) : null + + return createPortal(component, portalElement) +} + +CreateEvaluationDialog.propTypes = { + show: PropTypes.bool, + dialogProps: PropTypes.object, + onCancel: PropTypes.func, + onConfirm: PropTypes.func +} + +export default CreateEvaluationDialog diff --git a/packages/ui/src/views/evaluations/EvalsResultDialog.jsx b/packages/ui/src/views/evaluations/EvalsResultDialog.jsx new file mode 100644 index 000000000..a74c2b76e --- /dev/null +++ b/packages/ui/src/views/evaluations/EvalsResultDialog.jsx @@ -0,0 +1,456 @@ +import React from 'react' +import { createPortal } from 'react-dom' +import PropTypes from 'prop-types' +import { useSelector } from 'react-redux' + +// Material +import { + Stack, + Chip, + TableContainer, + Table, + TableHead, + TableBody, + TableRow, + Dialog, + DialogContent, + DialogTitle, + Paper, + Button, + TableCell +} from '@mui/material' +import { useTheme } from '@mui/material/styles' +import { IconVectorBezier2, IconMinimize } from '@tabler/icons-react' +import LLMIcon from '@mui/icons-material/ModelTraining' +import AlarmIcon from '@mui/icons-material/AlarmOn' +import TokensIcon from '@mui/icons-material/AutoAwesomeMotion' +import PaidIcon from '@mui/icons-material/Paid' + +// Project imports +import { StyledTableCell, StyledTableRow } from '@/ui-component/table/TableStyles' + +// const + +const EvalsResultDialog = ({ show, dialogProps, onCancel, openDetailsDrawer }) => { + const portalElement = document.getElementById('portal') + const customization = useSelector((state) => state.customization) + const theme = useTheme() + + const getColSpan = (evaluationsShown, llmEvaluations) => { + let colSpan = 1 + if (evaluationsShown) colSpan++ + if (llmEvaluations) colSpan++ + return colSpan + } + + const getOpenLink = (index) => { + if (index === undefined) { + return '' + } + if (dialogProps.data?.additionalConfig?.chatflowTypes) { + switch (dialogProps.data.additionalConfig.chatflowTypes[index]) { + case 'Chatflow': + return 
'/canvas/' + dialogProps.data.evaluation.chatflowId[index] + case 'Custom Assistant': + return '/assistants/custom/' + dialogProps.data.evaluation.chatflowId[index] + case 'Agentflow v2': + return '/v2/agentcanvas/' + dialogProps.data.evaluation.chatflowId[index] + } + } + return '/canvas/' + dialogProps.data.evaluation.chatflowId[index] + } + + const component = show ? ( + + + + {dialogProps.data && dialogProps.data.evaluation.chatflowName?.length > 0 && ( + +
    + + Flows Used: +
    + {(dialogProps.data.evaluation.chatflowName || []).map((chatflowUsed, index) => ( + window.open(getOpenLink(index), '_blank')} + > + ))} +
    + )} + +
    +
    + + + + + +   + Input + Expected Output + {dialogProps.data && + dialogProps.data.evaluation.chatflowId?.map((chatflowId, index) => ( + + + {dialogProps.data.evaluation.chatflowName[index]} + {dialogProps.data.rows.length > 0 && dialogProps.data.rows[0].metrics[index].model && ( + } + color={'info'} + size='small' + label={ + dialogProps.data.rows[0].metrics[index].model + + (dialogProps.data.rows[0].metrics[index].provider + ? ' [' + dialogProps.data.rows[0].metrics[index].provider + ']' + : '') + } + sx={{ ml: 2 }} + /> + )} + + + ))} + + + {dialogProps.data && + dialogProps.data.evaluation.chatflowId?.map((chatflowId, index) => ( + + + Actual Output + + {dialogProps.data.customEvalsDefined && dialogProps.data.showCustomEvals && ( + Evaluator + )} + {dialogProps.data.evaluation?.evaluationType === 'llm' && LLM Evaluation} + + ))} + + + + <> + {dialogProps.data && + dialogProps.data.rows.length > 0 && + dialogProps.data.rows.map((item, index) => ( + openDetailsDrawer(item)} + hover + key={index} + sx={{ cursor: 'pointer', '&:last-child td, &:last-child th': { border: 0 } }} + > + {index + 1} + {item.input} + {item.expectedOutput} + {dialogProps.data.evaluation.chatflowId?.map((_, index) => ( + + + {item.errors[index] === '' ? ( + <> +
    + {item.actualOutput[index]} +
    + + } + size='small' + label={ + item.metrics[index]?.totalCost + ? 'Total Cost: ' + item.metrics[index]?.totalCost + : 'Total Cost: N/A' + } + sx={{ mr: 1, mb: 1 }} + /> + } + label={ + item.metrics[index]?.totalTokens + ? 'Total Tokens: ' + item.metrics[index]?.totalTokens + : 'Total Tokens: N/A' + } + sx={{ mr: 1, mb: 1 }} + /> + {dialogProps.data.showTokenMetrics && ( + <> + } + label={ + item.metrics[index]?.promptTokens + ? 'Prompt Tokens: ' + + item.metrics[index]?.promptTokens + : 'Prompt Tokens: N/A' + } + sx={{ mr: 1, mb: 1 }} + />{' '} + } + label={ + item.metrics[index]?.completionTokens + ? 'Completion Tokens: ' + + item.metrics[index]?.completionTokens + : 'Completion Tokens: N/A' + } + sx={{ mr: 1, mb: 1 }} + />{' '} + + )} + {dialogProps.data.showCostMetrics && ( + <> + } + label={ + item.metrics[index]?.promptCost + ? 'Prompt Cost: ' + item.metrics[index]?.promptCost + : 'Prompt Cost: N/A' + } + sx={{ mr: 1, mb: 1 }} + />{' '} + } + label={ + item.metrics[index]?.completionCost + ? 'Completion Cost: ' + + item.metrics[index]?.completionCost + : 'Completion Cost: N/A' + } + sx={{ mr: 1, mb: 1 }} + />{' '} + + )} + } + label={ + item.metrics[index]?.apiLatency + ? 'API Latency: ' + item.metrics[index]?.apiLatency + : 'API Latency: N/A' + } + sx={{ mr: 1, mb: 1 }} + />{' '} + {dialogProps.data.showLatencyMetrics && ( + <> + {item.metrics[index]?.chain && ( + } + label={ + item.metrics[index]?.chain + ? 'Chain Latency: ' + item.metrics[index]?.chain + : 'Chain Latency: N/A' + } + sx={{ mr: 1, mb: 1 }} + /> + )}{' '} + {item.metrics[index]?.retriever && ( + } + size='small' + sx={{ mr: 1, mb: 1 }} + label={ + 'Retriever Latency: ' + + item.metrics[index]?.retriever + } + /> + )}{' '} + {item.metrics[index]?.tool && ( + } + size='small' + sx={{ mr: 1, mb: 1 }} + label={'Tool Latency: ' + item.metrics[index]?.tool} + /> + )}{' '} + } + size='small' + label={ + item.metrics[index]?.llm + ? 
'LLM Latency: ' + item.metrics[index]?.llm + : 'LLM Latency: N/A' + } + sx={{ mr: 1, mb: 1 }} + />{' '} + + )} + + + ) : ( + + )} +
    + {dialogProps.data.customEvalsDefined && dialogProps.data.showCustomEvals && ( + + {(item.customEvals[index] || []).map((evaluator, index) => ( + + + + ))} + + )} + {dialogProps.data.evaluation?.evaluationType === 'llm' && ( + + {item.llmEvaluators[index] && ( + + {Object.entries(item.llmEvaluators[index]).map( + ([key, value], index) => ( + + {key}: {value} + + } + /> + ) + )} + + )} + + )} +
    + ))} +
    + ))} + +
    +
    +
    +
    +
    + ) : null + + return createPortal(component, portalElement) +} + +EvalsResultDialog.propTypes = { + show: PropTypes.bool, + dialogProps: PropTypes.object, + onCancel: PropTypes.func, + openDetailsDrawer: PropTypes.func +} + +export default EvalsResultDialog diff --git a/packages/ui/src/views/evaluations/EvaluationResult.jsx b/packages/ui/src/views/evaluations/EvaluationResult.jsx new file mode 100644 index 000000000..6fdde95b7 --- /dev/null +++ b/packages/ui/src/views/evaluations/EvaluationResult.jsx @@ -0,0 +1,1050 @@ +import React, { useEffect, useState } from 'react' +import { useDispatch, useSelector } from 'react-redux' +import { useNavigate } from 'react-router-dom' + +// material-ui +import { + TableContainer, + Table, + TableHead, + TableBody, + Divider, + Chip, + Paper, + Stack, + ButtonGroup, + Button, + Grid, + ListItem, + Box, + IconButton, + TableRow, + Skeleton, + TableCell +} from '@mui/material' +import { useTheme } from '@mui/material/styles' +import moment from 'moment' +import PaidIcon from '@mui/icons-material/Paid' +import { IconHierarchy, IconUsersGroup, IconRobot } from '@tabler/icons-react' +import LLMIcon from '@mui/icons-material/ModelTraining' +import AlarmIcon from '@mui/icons-material/AlarmOn' +import TokensIcon from '@mui/icons-material/AutoAwesomeMotion' + +// project imports +import MainCard from '@/ui-component/cards/MainCard' +import ViewHeader from '@/layout/MainLayout/ViewHeader' +import MetricsItemCard from '@/views/evaluations/MetricsItemCard' +import { ChartLatency } from '@/views/evaluations/ChartLatency' +import { ChartPassPrnt } from '@/views/evaluations/ChartPassPrnt' +import { ChartTokens } from '@/views/evaluations/ChartTokens' +import EvaluationResultSideDrawer from '@/views/evaluations/EvaluationResultSideDrawer' +import ErrorBoundary from '@/ErrorBoundary' +import { StyledTableCell, StyledTableRow } from '@/ui-component/table/TableStyles' +import ConfirmDialog from '@/ui-component/dialog/ConfirmDialog' +import 
EvaluationResultVersionsSideDrawer from '@/views/evaluations/EvaluationResultVersionsSideDrawer' +import EvalsResultDialog from '@/views/evaluations/EvalsResultDialog' +import { PermissionButton } from '@/ui-component/button/RBACButtons' + +// API +import useNotifier from '@/utils/useNotifier' +import useApi from '@/hooks/useApi' +import evaluationApi from '@/api/evaluations' + +// Hooks +import useConfirm from '@/hooks/useConfirm' +import { closeSnackbar as closeSnackbarAction, enqueueSnackbar as enqueueSnackbarAction } from '@/store/actions' + +// icons +import { + IconPercentage, + IconVectorBezier2, + IconMaximize, + IconClock, + IconAlertTriangle, + IconRun, + IconEye, + IconEyeOff, + IconX +} from '@tabler/icons-react' + +//const +import { useError } from '@/store/context/ErrorContext' + +// ==============================|| EvaluationResults ||============================== // + +const EvalEvaluationRows = () => { + const navigate = useNavigate() + const theme = useTheme() + const customization = useSelector((state) => state.customization) + const { confirm } = useConfirm() + const dispatch = useDispatch() + useNotifier() + const { error } = useError() + + const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) + const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) + + const [rows, setRows] = useState([]) + const [selectedEvaluationName, setSelectedEvaluationName] = useState('') + const [evaluation, setEvaluation] = useState({}) + + const [showCostMetrics, setShowCostMetrics] = useState(false) + const [showLatencyMetrics, setShowLatencyMetrics] = useState(false) + const [showTokenMetrics, setShowTokenMetrics] = useState(false) + const [showCustomEvals, setShowCustomEvals] = useState(false) + const [showCharts, setShowCharts] = useState(true) + + const [latencyChartData, setLatencyChartData] = useState([]) + const [tokensChartData, setTokensChartData] = useState([]) + const [passPrntChartData, setPassPcntChartData] = 
useState([]) + const [avgTokensUsed, setAvgTokensUsed] = useState() + + const [showSideDrawer, setShowSideDrawer] = useState(false) + const [sideDrawerDialogProps, setSideDrawerDialogProps] = useState({}) + + const [showVersionSideDrawer, setShowVersionSideDrawer] = useState(false) + const [versionDrawerDialogProps, setVersionDrawerDialogProps] = useState({}) + + const [outdated, setOutdated] = useState(null) + + const getEvaluation = useApi(evaluationApi.getEvaluation) + const getIsOutdatedApi = useApi(evaluationApi.getIsOutdated) + const runAgainApi = useApi(evaluationApi.runAgain) + + const [customEvalsDefined, setCustomEvalsDefined] = useState(false) + + const [showExpandTableDialog, setShowExpandTableDialog] = useState(false) + const [expandTableProps, setExpandTableProps] = useState({}) + const [isTableLoading, setTableLoading] = useState(false) + + const [additionalConfig, setAdditionalConfig] = useState({}) + + const openDetailsDrawer = (item) => { + setSideDrawerDialogProps({ + type: 'View', + data: item, + additionalConfig: additionalConfig, + evaluationType: evaluation.evaluationType, + evaluationChatflows: evaluation.chatflowName + }) + setShowSideDrawer(true) + } + + const closeDetailsDrawer = () => { + setShowSideDrawer(false) + } + + const openVersionsDrawer = () => { + setVersionDrawerDialogProps({ + id: evaluation?.id + }) + setShowVersionSideDrawer(true) + } + + const closeVersionsDrawer = () => { + setShowVersionSideDrawer(false) + } + + const handleShowChartsChange = () => { + setShowCharts(!showCharts) + } + + const handleShowTokenChange = () => { + setShowTokenMetrics(!showTokenMetrics) + } + + const handleLatencyMetricsChange = () => { + setShowLatencyMetrics(!showLatencyMetrics) + } + + const handleCustomEvalsChange = () => { + setShowCustomEvals(!showCustomEvals) + } + const handleDisplayCostChange = () => { + setShowCostMetrics(!showCostMetrics) + } + + const openTableDialog = () => { + setExpandTableProps({ + data: { + evaluation, + rows, 
+ customEvalsDefined, + showCustomEvals, + showTokenMetrics, + showLatencyMetrics, + showCostMetrics, + additionalConfig + } + }) + setShowExpandTableDialog(true) + } + + const runAgain = async () => { + const confirmPayload = { + title: `Run Again`, + description: `Initiate Rerun for Evaluation ${evaluation.name}?`, + confirmButtonName: 'Yes', + cancelButtonName: 'No' + } + const isConfirmed = await confirm(confirmPayload) + + if (isConfirmed) { + runAgainApi.request(evaluation?.id) + enqueueSnackbar({ + message: "Evaluation '" + evaluation.name + "' is running. Redirecting to evaluations page.", + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + navigate(`/evaluations`) + } + } + + const URLpath = document.location.pathname.toString().split('/') + const evalId = URLpath[URLpath.length - 1] === 'evaluation_rows' ? '' : URLpath[URLpath.length - 1] + + const goBack = () => { + navigate(`/evaluations`) + } + + const getColSpan = (evaluationsShown, llmEvaluations) => { + let colSpan = 1 + if (evaluationsShown) colSpan++ + if (llmEvaluations) colSpan++ + return colSpan + } + + useEffect(() => { + getEvaluation.request(evalId) + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []) + + useEffect(() => { + setTableLoading(getEvaluation.loading) + }, [getEvaluation.loading]) + + useEffect(() => { + if (getIsOutdatedApi.data) { + if (getIsOutdatedApi.data.isOutdated) { + setOutdated(getIsOutdatedApi.data) + } + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getIsOutdatedApi.data]) + + useEffect(() => { + if (getEvaluation.data) { + const data = getEvaluation.data + setSelectedEvaluationName(data.name) + getIsOutdatedApi.request(data.id) + if (data.additionalConfig) { + setAdditionalConfig(JSON.parse(data.additionalConfig)) + } + data.chatflowId = typeof data.chatflowId === 'object' ? 
data.chatflowId : JSON.parse(data.chatflowId) + data.chatflowName = typeof data.chatflowName === 'object' ? data.chatflowName : JSON.parse(data.chatflowName) + const rows = getEvaluation.data.rows + const latencyChartData = [] + const tokensChartData = [] + let totalTokens = 0 + for (let i = 0; i < rows.length; i++) { + rows[i].metrics = typeof rows[i].metrics === 'object' ? rows[i].metrics : JSON.parse(rows[i].metrics) + rows[i].actualOutput = typeof rows[i].actualOutput === 'object' ? rows[i].actualOutput : JSON.parse(rows[i].actualOutput) + rows[i].customEvals = typeof rows[i].evaluators === 'object' ? rows[i].evaluators : JSON.parse(rows[i].evaluators || []) + const latencyObj = { + y: i + 1 + } + const tokensObj = { + y: i + 1 + } + for (let m = 0; m < rows[i].metrics.length; m++) { + if (rows[i].metrics[m]?.apiLatency > 0) { + latencyObj[data.chatflowName[m]] = parseFloat(rows[i].metrics[m]?.apiLatency, 10) + } + if (rows[i].metrics[m]?.totalTokens) { + totalTokens += rows[i].metrics[m]?.totalTokens + tokensObj[data.chatflowName[m] + ' Prompt'] = rows[i].metrics[m]?.promptTokens + tokensObj[data.chatflowName[m] + ' Completion'] = rows[i].metrics[m]?.completionTokens + } + } + latencyChartData.push(latencyObj) + tokensChartData.push(tokensObj) + if (rows[i].llmEvaluators) { + rows[i].llmEvaluators = + typeof rows[i].llmEvaluators === 'object' ? rows[i].llmEvaluators : JSON.parse(rows[i].llmEvaluators || []) + } + if ( + rows[i].errors && + typeof rows[i].errors === 'string' && + rows[i].errors.startsWith('[') && + rows[i].errors.endsWith(']') + ) { + rows[i].errors = JSON.parse(rows[i].errors) || [] + } + } + setRows(rows) + setLatencyChartData(latencyChartData) + setTokensChartData(tokensChartData) + const evaluation = data + evaluation.average_metrics = + typeof evaluation.average_metrics === 'object' ? 
evaluation.average_metrics : JSON.parse(evaluation.average_metrics) + const passPntData = [] + setCustomEvalsDefined(data?.average_metrics?.passPcnt >= 0) + setShowCustomEvals(data?.average_metrics?.passPcnt >= 0) + if (data?.average_metrics?.passCount >= 0) { + passPntData.push({ + name: 'Pass', + value: data.average_metrics.passCount + }) + } + if (data?.average_metrics?.failCount >= 0) { + passPntData.push({ + name: 'Fail', + value: data.average_metrics.failCount + }) + } + if (data?.average_metrics?.errorCount >= 0) { + passPntData.push({ + name: 'Error', + value: data.average_metrics.errorCount + }) + } + setPassPcntChartData(passPntData) + setAvgTokensUsed((totalTokens / rows.length).toFixed(2)) + setEvaluation(evaluation) + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getEvaluation.data]) + + const getOpenLink = (index) => { + if (index === undefined) { + return undefined + } + const id = evaluation.chatflowId[index] + // this is to check if the evaluation is deleted! + if (outdated?.errors?.length > 0 && outdated.errors.find((e) => e.id === id)) { + return undefined + } + if (additionalConfig.chatflowTypes) { + switch (additionalConfig.chatflowTypes[index]) { + case 'Chatflow': + return '/canvas/' + evaluation.chatflowId[index] + case 'Custom Assistant': + return '/assistants/custom/' + evaluation.chatflowId[index] + case 'Agentflow v2': + return '/v2/agentcanvas/' + evaluation.chatflowId[index] + } + } + return '/canvas/' + evaluation.chatflowId[index] + } + + const openFlow = (index) => { + const url = getOpenLink(index) + if (url) { + window.open(getOpenLink(index), '_blank') + } + } + + const getFlowIcon = (index) => { + if (index === undefined) { + return + } + if (additionalConfig.chatflowTypes) { + switch (additionalConfig.chatflowTypes[index]) { + case 'Chatflow': + return + case 'Custom Assistant': + return + case 'Agentflow v2': + return + } + } + return + } + return ( + <> + + {error ? 
( + + ) : ( + + + {evaluation?.versionCount > 1 && ( + + )} + {evaluation?.versionCount > 1 && ( + + )} + } + variant='contained' + color='primary' + disabled={outdated?.errors?.length > 0} + onClick={runAgain} + > + Re-run Evaluation + + + + + {outdated && ( +
    + + + + + + {outdated?.errors?.length > 0 && ( + This evaluation cannot be re-run, due to the following errors + )} + {outdated?.errors?.length === 0 && ( + The following items are outdated, re-run the evaluation for the latest results. + )} + + {outdated.dataset && outdated?.errors?.length === 0 && ( + <> +
    + Dataset: + window.open(`/dataset_rows/${outdated.dataset.id}`, '_blank')} + > + + )} + {outdated.chatflows && outdated?.errors?.length === 0 && outdated.chatflows.length > 0 && ( + <> +
    + Flows: + + {outdated.chatflows.map((chatflow, index) => ( + + window.open( + chatflow.chatflowType === 'Chatflow' + ? '/canvas/' + chatflow.chatflowId + : chatflow.chatflowType === 'Custom Assistant' + ? '/assistants/custom/' + chatflow.chatflowId + : '/v2/agentcanvas/' + chatflow.chatflowId, + '_blank' + ) + } + > + ))} + + + )} + {outdated.errors.length > 0 && + outdated.errors.map((error, index) => {error.message})} + setOutdated(null)} + > + + +
    +
    + )} + + + {customEvalsDefined && ( + + )} + + + + + {showCharts && ( + + {customEvalsDefined && ( + + + }} + component={} + /> + + )} + {avgTokensUsed !== undefined && !isNaN(avgTokensUsed) && ( + + + }} + component={ + + } + /> + + )} + {evaluation.average_metrics?.averageLatency !== undefined && ( + + + }} + component={ + + } + /> + + )} + + )} + + +
    + + Flows Used: +
    + {(evaluation.chatflowName || []).map((chatflowUsed, index) => ( + openFlow(index)} + > + ))} +
    + +
    + + + + +   + Input + Expected Output + {evaluation.chatflowId?.map((chatflowId, index) => ( + + + {evaluation.chatflowName[index]} + {rows.length > 0 && rows[0].metrics[index].model && ( + } + color={'info'} + size='small' + label={ + rows[0].metrics[index].model + + (rows[0].metrics[index].provider + ? ' [' + rows[0].metrics[index].provider + ']' + : '') + } + sx={{ ml: 2 }} + /> + )} + + + ))} + + + {evaluation.chatflowId?.map((chatflowId, index) => ( + + + Actual Output + + {customEvalsDefined && showCustomEvals && Evaluator} + {evaluation?.evaluationType === 'llm' && LLM Evaluation} + + ))} + + + + {isTableLoading ? ( + <> + + + + + + + + + + + + + + + + + + + + + + + + ) : ( + <> + {rows.length > 0 && + rows.map((item, index) => ( + openDetailsDrawer(item)} + hover + key={index} + sx={{ cursor: 'pointer', '&:last-child td, &:last-child th': { border: 0 } }} + > + {index + 1} + {item.input} + {item.expectedOutput} + {evaluation.chatflowId?.map((_, index) => ( + + + {item.errors[index] === '' ? ( + <> +
    + {item.actualOutput[index]} +
    + + } + size='small' + label={ + item.metrics[index]?.totalCost + ? 'Total Cost: ' + + item.metrics[index]?.totalCost + : 'Total Cost: N/A' + } + sx={{ mr: 1, mb: 1 }} + /> + } + label={ + item.metrics[index]?.totalTokens + ? 'Total Tokens: ' + + item.metrics[index]?.totalTokens + : 'Total Tokens: N/A' + } + sx={{ mr: 1, mb: 1 }} + /> + {showTokenMetrics && ( + <> + } + label={ + item.metrics[index]?.promptTokens + ? 'Prompt Tokens: ' + + item.metrics[index]?.promptTokens + : 'Prompt Tokens: N/A' + } + sx={{ mr: 1, mb: 1 }} + />{' '} + } + label={ + item.metrics[index]?.completionTokens + ? 'Completion Tokens: ' + + item.metrics[index]?.completionTokens + : 'Completion Tokens: N/A' + } + sx={{ mr: 1, mb: 1 }} + />{' '} + + )} + {showCostMetrics && ( + <> + } + label={ + item.metrics[index]?.promptCost + ? 'Prompt Cost: ' + + item.metrics[index]?.promptCost + : 'Prompt Cost: N/A' + } + sx={{ mr: 1, mb: 1 }} + />{' '} + } + label={ + item.metrics[index]?.completionCost + ? 'Completion Cost: ' + + item.metrics[index]?.completionCost + : 'Completion Cost: N/A' + } + sx={{ mr: 1, mb: 1 }} + />{' '} + + )} + } + label={ + item.metrics[index]?.apiLatency + ? 'API Latency: ' + + item.metrics[index]?.apiLatency + : 'API Latency: N/A' + } + sx={{ mr: 1, mb: 1 }} + />{' '} + {showLatencyMetrics && ( + <> + {item.metrics[index]?.chain && ( + } + label={ + item.metrics[index]?.chain + ? 'Chain Latency: ' + + item.metrics[index]?.chain + : 'Chain Latency: N/A' + } + sx={{ mr: 1, mb: 1 }} + /> + )}{' '} + {item.metrics[index]?.retriever && ( + } + size='small' + sx={{ mr: 1, mb: 1 }} + label={ + 'Retriever Latency: ' + + item.metrics[index]?.retriever + } + /> + )}{' '} + {item.metrics[index]?.tool && ( + } + size='small' + sx={{ mr: 1, mb: 1 }} + label={ + 'Tool Latency: ' + + item.metrics[index]?.tool + } + /> + )}{' '} + } + size='small' + label={ + item.metrics[index]?.llm + ? 
'LLM Latency: ' + + item.metrics[index]?.llm + : 'LLM Latency: N/A' + } + sx={{ mr: 1, mb: 1 }} + />{' '} + + )} + + + ) : ( + + )} +
    + {customEvalsDefined && showCustomEvals && ( + + {(item.customEvals[index] || []).map((evaluator, index) => ( + + + + ))} + + )} + {evaluation?.evaluationType === 'llm' && ( + + {item.llmEvaluators[index] && ( + + {Object.entries(item.llmEvaluators[index]).map( + ([key, value], index) => ( + + {key.toUpperCase()}: {value} + + } + /> + ) + )} + + )} + + )} +
    + ))} +
    + ))} + + )} +
    +
    +
    + {showSideDrawer && ( + + )} + {showVersionSideDrawer && ( + { + setShowVersionSideDrawer(false) + navigate(`/evaluation_results/${versionId}`) + navigate(0) + }} + /> + )} +
    + )} +
    + + setShowExpandTableDialog(false)} + openDetailsDrawer={(item) => { + openDetailsDrawer(item) + }} + /> + + ) +} + +export default EvalEvaluationRows diff --git a/packages/ui/src/views/evaluations/EvaluationResultSideDrawer.jsx b/packages/ui/src/views/evaluations/EvaluationResultSideDrawer.jsx new file mode 100644 index 000000000..c415fb983 --- /dev/null +++ b/packages/ui/src/views/evaluations/EvaluationResultSideDrawer.jsx @@ -0,0 +1,518 @@ +import PropTypes from 'prop-types' +import { + CardContent, + Card, + Box, + SwipeableDrawer, + Stack, + Button, + Chip, + Divider, + Typography, + Table, + TableHead, + TableRow, + TableBody +} from '@mui/material' +import { IconHierarchy, IconUsersGroup, IconRobot } from '@tabler/icons-react' + +import { useSelector } from 'react-redux' +import { evaluators as evaluatorsOptions, numericOperators } from '../evaluators/evaluatorConstant' +import TableCell from '@mui/material/TableCell' +import { Close } from '@mui/icons-material' + +const EvaluationResultSideDrawer = ({ show, dialogProps, onClickFunction }) => { + const onOpen = () => {} + const customization = useSelector((state) => state.customization) + + const getEvaluatorValue = (evaluator) => { + if (evaluator.type === 'text') { + return '"' + evaluator.value + '"' + } else if (evaluator.name === 'json') { + return '' + } else if (evaluator.type === 'numeric') { + return evaluator.value + } + return '' + } + + const getFlowIcon = (index) => { + if (index === undefined) { + return + } + if (dialogProps.additionalConfig.chatflowTypes) { + switch (dialogProps.additionalConfig.chatflowTypes[index]) { + case 'Chatflow': + return + case 'Custom Assistant': + return + case 'Agentflow v2': + return + } + } + return + } + + return ( + onClickFunction()} onOpen={onOpen}> +
    + + Evaluation Details + +
    + + + + Evaluation Id + + {dialogProps.data.evaluationId} + + +
    + + + +
    + + Input + + {dialogProps.data.input} +
    + +
    + + + +
    + + Expected Output + + {dialogProps.data.expectedOutput} +
    + + {dialogProps.data && + dialogProps.data.actualOutput?.length > 0 && + dialogProps.data.actualOutput.map((output, index) => ( + + + {dialogProps.evaluationChatflows?.length > 0 && ( + <> +
    + {getFlowIcon(index)} + + {dialogProps.evaluationChatflows[index]} + +
    + + + )} + +
    + + {dialogProps.data.errors[index] === '' ? 'Actual Output' : 'Error'} + + + {dialogProps.data.errors[index] === '' ? ( + dialogProps.data.actualOutput[index] + ) : ( + + )} + +
    +
    + + +
    + + Latency Metrics + + + + + {dialogProps.data.metrics[index]?.chain && ( + + )} + {dialogProps.data.metrics[index]?.retriever && ( + + )} + {dialogProps.data.metrics[index]?.tool && ( + + )} + + + +
    +
    + +
    + {dialogProps.data.metrics[index]?.nested_metrics ? ( + + + Tokens + + + + + + Node + + + Provider & Model + + + Input + + + Output + + + Total + + + + + {dialogProps.data.metrics[index]?.nested_metrics?.map((metric, index) => ( + + + {metric.nodeLabel} + + + {metric.provider} +
    + {metric.model} +
    + + {metric.promptTokens} + + + {metric.completionTokens} + + + {metric.totalTokens} + +
    + ))} + + + Total + + + {dialogProps.data.metrics[index].promptTokens} + + + {dialogProps.data.metrics[index].completionTokens} + + + {dialogProps.data.metrics[index].totalTokens} + + +
    +
    +
    + ) : ( + + + Tokens + + + + + + + + + + )} +
    + {dialogProps.data.metrics[index]?.nested_metrics ? ( + + + Cost + + + + + + Node + + + Provider & Model + + + Input + + + Output + + + Total + + + + + {dialogProps.data.metrics[index]?.nested_metrics?.map((metric, index) => ( + + + {metric.nodeLabel} + + + {metric.provider}
    + {metric.model} +
    + + {metric.promptCost} + + + {metric.completionCost} + + + {metric.totalCost} + +
    + ))} + + + Total + + + {dialogProps.data.metrics[index].promptCost} + + + {dialogProps.data.metrics[index].completionCost} + + + {dialogProps.data.metrics[index].totalCost} + + +
    +
    +
    + ) : ( + + + Cost + + + + + + + + + + )} +
    + +
    + {dialogProps.data?.customEvals && + dialogProps.data?.customEvals[index] && + dialogProps.data.customEvals[index].length > 0 && ( + + + Custom Evaluators + + + {dialogProps.data.customEvals[index] && + dialogProps.data.customEvals[index].map((evaluator, index) => ( + + + + opt.name === evaluator.measure + )?.label || 'Actual Output' + } ${ + [...evaluatorsOptions, ...numericOperators] + .find((opt) => opt.name === evaluator.operator) + ?.label.toLowerCase() || '' + } ${getEvaluatorValue(evaluator)}`} + > + + ))} + + + )} + {dialogProps?.evaluationType === 'llm' && ( + <> +
    + + +
    + + LLM Graded + + + {Object.entries(dialogProps.data.llmEvaluators[index]).map(([key, value], index) => ( + + {key}: {value} + + } + /> + ))} + +
    + + )} +
    +
    + ))} +
    +
    + ) +} + +EvaluationResultSideDrawer.propTypes = { + show: PropTypes.bool, + dialogProps: PropTypes.object, + onClickFunction: PropTypes.func +} + +export default EvaluationResultSideDrawer diff --git a/packages/ui/src/views/evaluations/EvaluationResultVersionsSideDrawer.jsx b/packages/ui/src/views/evaluations/EvaluationResultVersionsSideDrawer.jsx new file mode 100644 index 000000000..91f5ad7c8 --- /dev/null +++ b/packages/ui/src/views/evaluations/EvaluationResultVersionsSideDrawer.jsx @@ -0,0 +1,84 @@ +import { useEffect, useState } from 'react' +import PropTypes from 'prop-types' +import moment from 'moment/moment' + +import { Button, Box, SwipeableDrawer } from '@mui/material' +import { IconSquareRoundedChevronsRight } from '@tabler/icons-react' +import { + Timeline, + TimelineConnector, + TimelineContent, + TimelineDot, + TimelineItem, + TimelineOppositeContent, + timelineOppositeContentClasses, + TimelineSeparator +} from '@mui/lab' + +import evaluationApi from '@/api/evaluations' +import useApi from '@/hooks/useApi' + +const EvaluationResultVersionsSideDrawer = ({ show, dialogProps, onClickFunction, onSelectVersion }) => { + const onOpen = () => {} + const [versions, setVersions] = useState([]) + + const getVersionsApi = useApi(evaluationApi.getVersions) + + useEffect(() => { + getVersionsApi.request(dialogProps.id) + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [dialogProps]) + + useEffect(() => { + if (getVersionsApi.data) { + setVersions(getVersionsApi.data.versions) + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getVersionsApi.data]) + + const navigateToEvaluationResult = (id) => { + onSelectVersion(id) + } + + return ( + onClickFunction()} onOpen={onOpen}> + + + + {versions && + versions.map((version, index) => ( + + + {moment(version.runDate).format('DD-MMM-YYYY, hh:mm:ss A')} + + + + {index !== versions.length - 1 && } + + + + + + ))} + + + + ) +} + +EvaluationResultVersionsSideDrawer.propTypes = { + show: 
PropTypes.bool, + dialogProps: PropTypes.object, + onClickFunction: PropTypes.func, + onSelectVersion: PropTypes.func +} + +export default EvaluationResultVersionsSideDrawer diff --git a/packages/ui/src/views/evaluations/MetricsItemCard.jsx b/packages/ui/src/views/evaluations/MetricsItemCard.jsx new file mode 100644 index 000000000..380f19c72 --- /dev/null +++ b/packages/ui/src/views/evaluations/MetricsItemCard.jsx @@ -0,0 +1,58 @@ +import PropTypes from 'prop-types' + +// material-ui +import { styled } from '@mui/material/styles' +import { Box, Grid, Typography } from '@mui/material' + +// project imports +import MainCard from '@/ui-component/cards/MainCard' +import SkeletonChatflowCard from '@/ui-component/cards/Skeleton/ChatflowCard' + +const CardWrapper = styled(MainCard)(({ theme }) => ({ + background: theme.palette.card.main, + color: theme.darkTextPrimary, + overflow: 'hidden', + position: 'relative', + boxShadow: '0 2px 14px 0 rgb(32 40 45 / 8%)', + cursor: 'pointer', + '&:hover': { + background: theme.palette.card.hover, + boxShadow: '0 2px 14px 0 rgb(32 40 45 / 20%)' + }, + overflowWrap: 'break-word', + whiteSpace: 'pre-line' +})) + +const MetricsItemCard = ({ isLoading, data, component }) => { + return ( + <> + {isLoading ? 
( + + ) : ( + + + + + + {data.icon} + + {data.header} + + + + + + {component} + + )} + + ) +} + +MetricsItemCard.propTypes = { + isLoading: PropTypes.bool, + data: PropTypes.object, + component: PropTypes.element +} + +export default MetricsItemCard diff --git a/packages/ui/src/views/evaluations/index.jsx b/packages/ui/src/views/evaluations/index.jsx new file mode 100644 index 000000000..85a2d6c66 --- /dev/null +++ b/packages/ui/src/views/evaluations/index.jsx @@ -0,0 +1,895 @@ +import React, { useEffect, useState, useCallback } from 'react' +import * as PropTypes from 'prop-types' +import moment from 'moment/moment' +import { useNavigate } from 'react-router-dom' +import { useDispatch, useSelector } from 'react-redux' + +// material-ui +import { + Checkbox, + Skeleton, + TableCell, + Box, + Button, + Chip, + Collapse, + IconButton, + Paper, + Stack, + Table, + TableBody, + TableContainer, + TableHead, + TableRow, + ToggleButton +} from '@mui/material' +import { useTheme } from '@mui/material/styles' +import { closeSnackbar as closeSnackbarAction, enqueueSnackbar as enqueueSnackbarAction } from '@/store/actions' + +// API +import evaluationApi from '@/api/evaluations' +import useApi from '@/hooks/useApi' + +// Hooks +import useConfirm from '@/hooks/useConfirm' +import useNotifier from '@/utils/useNotifier' +import { useError } from '@/store/context/ErrorContext' + +// project +import MainCard from '@/ui-component/cards/MainCard' +import { BackdropLoader } from '@/ui-component/loading/BackdropLoader' +import ConfirmDialog from '@/ui-component/dialog/ConfirmDialog' +import ErrorBoundary from '@/ErrorBoundary' +import ViewHeader from '@/layout/MainLayout/ViewHeader' +import { StyledTableCell, StyledTableRow } from '@/ui-component/table/TableStyles' +import CreateEvaluationDialog from '@/views/evaluations/CreateEvaluationDialog' +import { StyledPermissionButton } from '@/ui-component/button/RBACButtons' +import TablePagination, { DEFAULT_ITEMS_PER_PAGE } from 
'@/ui-component/pagination/TablePagination' + +// icons +import { + IconChartHistogram, + IconPlus, + IconChartBar, + IconRefresh, + IconTrash, + IconX, + IconChevronsUp, + IconChevronsDown, + IconPlayerPlay, + IconPlayerPause +} from '@tabler/icons-react' +import empty_evalSVG from '@/assets/images/empty_evals.svg' + +const EvalsEvaluation = () => { + const theme = useTheme() + const customization = useSelector((state) => state.customization) + const { confirm } = useConfirm() + const dispatch = useDispatch() + useNotifier() + const { error } = useError() + + const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) + const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) + + const createNewEvaluation = useApi(evaluationApi.createEvaluation) + const getAllEvaluations = useApi(evaluationApi.getAllEvaluations) + + const [showNewEvaluationDialog, setShowNewEvaluationDialog] = useState(false) + const [dialogProps, setDialogProps] = useState({}) + const [rows, setRows] = useState([]) + const [loading, setLoading] = useState(false) + const [isTableLoading, setTableLoading] = useState(false) + const [selected, setSelected] = useState([]) + const [autoRefresh, setAutoRefresh] = useState(false) + + /* Table Pagination */ + const [currentPage, setCurrentPage] = useState(1) + const [pageLimit, setPageLimit] = useState(DEFAULT_ITEMS_PER_PAGE) + const [total, setTotal] = useState(0) + const onChange = (page, pageLimit) => { + setCurrentPage(page) + setPageLimit(pageLimit) + refresh(page, pageLimit) + } + + const refresh = (page, limit) => { + const params = { + page: page || currentPage, + limit: limit || pageLimit + } + getAllEvaluations.request(params) + } + + const onSelectAllClick = (event) => { + if (event.target.checked) { + const newSelected = rows.filter((item) => item?.latestEval).map((n) => n.id) + setSelected(newSelected) + return + } + setSelected([]) + } + + const handleSelect = (event, id) => { + const selectedIndex = 
selected.indexOf(id) + let newSelected = [] + + if (selectedIndex === -1) { + newSelected = newSelected.concat(selected, id) + } else if (selectedIndex === 0) { + newSelected = newSelected.concat(selected.slice(1)) + } else if (selectedIndex === selected.length - 1) { + newSelected = newSelected.concat(selected.slice(0, -1)) + } else if (selectedIndex > 0) { + newSelected = newSelected.concat(selected.slice(0, selectedIndex), selected.slice(selectedIndex + 1)) + } + setSelected(newSelected) + } + + const createEvaluation = () => { + const dialogProp = { + type: 'ADD', + cancelButtonName: 'Cancel', + confirmButtonName: 'Start New Evaluation', + data: {} + } + setDialogProps(dialogProp) + setShowNewEvaluationDialog(true) + } + + const deleteEvaluationsAllVersions = async () => { + const confirmPayload = { + title: `Delete`, + description: `Delete ${selected.length} ${ + selected.length > 1 ? 'evaluations' : 'evaluation' + }? This will delete all versions of the evaluation.`, + confirmButtonName: 'Delete', + cancelButtonName: 'Cancel' + } + const isConfirmed = await confirm(confirmPayload) + + if (isConfirmed) { + try { + const isDeleteAllVersion = true + const deleteResp = await evaluationApi.deleteEvaluations(selected, isDeleteAllVersion) + if (deleteResp.data) { + enqueueSnackbar({ + message: `${selected.length} ${selected.length > 1 ? 'evaluations' : 'evaluation'} deleted`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + onRefresh() + } + } catch (error) { + enqueueSnackbar({ + message: `Failed to delete ${selected.length > 1 ? 'evaluations' : 'evaluation'}: ${ + typeof error.response.data === 'object' ? 
error.response.data.message : error.response.data + }`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + } + setSelected([]) + } + } + + useEffect(() => { + refresh(currentPage, pageLimit) + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []) + + useEffect(() => { + if (getAllEvaluations.data) { + const evalRows = getAllEvaluations.data.data + setTotal(getAllEvaluations.data.total) + if (evalRows) { + // Prepare the data for the table + for (let i = 0; i < evalRows.length; i++) { + const evalRow = evalRows[i] + evalRows[i].runDate = moment(evalRow.runDate).format('DD-MMM-YYYY, hh:mm:ss A') + evalRows[i].average_metrics = + typeof evalRow.average_metrics === 'object' ? evalRow.average_metrics : JSON.parse(evalRow.average_metrics) + evalRows[i].usedFlows = + typeof evalRow.chatflowName === 'object' ? evalRow.chatflowName : JSON.parse(evalRow.chatflowName) + evalRows[i].chatIds = typeof evalRow.chatflowId === 'object' ? evalRow.chatflowId : JSON.parse(evalRow.chatflowId) + } + setRows(evalRows) + } + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getAllEvaluations.data]) + + useEffect(() => { + if (createNewEvaluation.data) { + const evalRows = createNewEvaluation.data + for (let i = 0; i < evalRows.length; i++) { + const evalRow = evalRows[i] + evalRows[i].runDate = moment(evalRow.runDate).format('DD-MMM-YYYY, hh:mm:ss A') + evalRows[i].average_metrics = + typeof evalRow.average_metrics === 'object' ? evalRow.average_metrics : JSON.parse(evalRow.average_metrics) + evalRows[i].usedFlows = typeof evalRow.chatflowName === 'object' ? evalRow.chatflowName : JSON.parse(evalRow.chatflowName) + evalRows[i].chatIds = typeof evalRow.chatflowId === 'object' ? 
evalRow.chatflowId : JSON.parse(evalRow.chatflowId) + } + setRows(evalRows) + } + setLoading(false) + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [createNewEvaluation.data]) + + const onConfirm = (evaluationData) => { + setShowNewEvaluationDialog(false) + setLoading(true) + createNewEvaluation.request(evaluationData) + } + + useEffect(() => { + if (createNewEvaluation.error) { + // Change to Notifstack + enqueueSnackbar({ + message: `Failed to create new evaluation: ${ + typeof createNewEvaluation.error.response?.data === 'object' + ? createNewEvaluation.error.response.data.message + : createNewEvaluation.error.response?.data || createNewEvaluation.error.message || 'Unknown error' + }`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + setLoading(false) + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [createNewEvaluation.error]) + + const onRefresh = useCallback(() => { + refresh(currentPage, pageLimit) + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getAllEvaluations]) + + useEffect(() => { + setTableLoading(getAllEvaluations.loading) + }, [getAllEvaluations.loading]) + + useEffect(() => { + let intervalId = null + + if (autoRefresh) { + intervalId = setInterval(() => { + onRefresh() + }, 5000) + } + + return () => { + if (intervalId) { + clearInterval(intervalId) + } + } + }, [autoRefresh, onRefresh]) + + const toggleAutoRefresh = () => { + setAutoRefresh(!autoRefresh) + } + + return ( + <> + + {error ? ( + + ) : ( + + + + {autoRefresh ? : } + + + + + } + > + New Evaluation + + + {selected.length > 0 && ( + } + > + Delete {selected.length} {selected.length === 1 ? 'evaluation' : 'evaluations'} + + )} + {!isTableLoading && rows.length <= 0 ? ( + + + empty_evalSVG + +
    No Evaluations Yet
    +
    + ) : ( + <> + + + + + + item?.latestEval) || []).length} + onChange={onSelectAllClick} + inputProps={{ + 'aria-label': 'select all' + }} + /> + + + Name + Latest Version + Average Metrics + Last Evaluated + Flow(s) + Dataset + + + + + {isTableLoading ? ( + <> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ) : ( + <> + {rows + .filter((item) => item?.latestEval) + .map((item, index) => ( + row.name === item.name)} + item={item} + key={index} + theme={theme} + selected={selected} + customization={customization} + onRefresh={onRefresh} + handleSelect={handleSelect} + /> + ))} + + )} + +
    +
    + {/* Pagination and Page Size Controls */} + + + )} +
    + )} +
    + {showNewEvaluationDialog && ( + setShowNewEvaluationDialog(false)} + onConfirm={onConfirm} + > + )} + + {loading && } + + ) +} + +function EvaluationRunRow(props) { + const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) + const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) + + const [open, setOpen] = useState(false) + const [childSelected, setChildSelected] = useState([]) + + const theme = useTheme() + const navigate = useNavigate() + const { confirm } = useConfirm() + const dispatch = useDispatch() + + const showResults = (item) => { + navigate(`/evaluation_results/${item.id}`) + } + + const goToDataset = (id) => { + window.open(`/dataset_rows/${id}`, '_blank') + } + + const onSelectAllChildClick = (event) => { + if (event.target.checked) { + const newSelected = (props?.rows || []).map((n) => n.id) + setChildSelected(newSelected) + return + } + setChildSelected([]) + } + + const handleSelectChild = (event, id) => { + const selectedIndex = childSelected.indexOf(id) + let newSelected = [] + + if (selectedIndex === -1) { + newSelected = newSelected.concat(childSelected, id) + } else if (selectedIndex === 0) { + newSelected = newSelected.concat(childSelected.slice(1)) + } else if (selectedIndex === childSelected.length - 1) { + newSelected = newSelected.concat(childSelected.slice(0, -1)) + } else if (selectedIndex > 0) { + newSelected = newSelected.concat(childSelected.slice(0, selectedIndex), childSelected.slice(selectedIndex + 1)) + } + setChildSelected(newSelected) + } + + const deleteChildEvaluations = async () => { + const confirmPayload = { + title: `Delete`, + description: `Delete ${childSelected.length} ${childSelected.length > 1 ? 
'evaluations' : 'evaluation'}?`, + confirmButtonName: 'Delete', + cancelButtonName: 'Cancel' + } + const isConfirmed = await confirm(confirmPayload) + + if (isConfirmed) { + try { + const deleteResp = await evaluationApi.deleteEvaluations(childSelected) + if (deleteResp.data) { + enqueueSnackbar({ + message: `${childSelected.length} evaluations deleted.`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + props.onRefresh() + } + } catch (error) { + enqueueSnackbar({ + message: `Failed to delete Evaluation: ${ + typeof error.response.data === 'object' ? error.response.data.message : error.response.data + }`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + } + } + } + + const getStatusColor = (status) => { + switch (status) { + case 'pending': + return '#ffc107' + case 'completed': + return '#52b69a' + case 'error': + return '#f44336' + default: + return '#bcbcbc' + } + } + + const getPassRateColor = (passPcnt) => { + if (passPcnt > 90) { + return '#52b69a' + } else if (passPcnt >= 50) { + return '#f48c06' + } else { + return '#f44336' + } + } + + return ( + + + + props.handleSelect(event, props.item.id)} + /> + + +
    +
    + {props.item.name} + + {props.item.version}{' '} + {props.item.version > 0 && ( + setOpen(!open)}> + {props.item.version > 0 && open ? : } + + )} + + + + + {props.item.average_metrics?.averageCost && ( + + )} + + {props.item.average_metrics?.passPcnt >= 0 && ( + + )} + + + {moment(props.item.runDate).format('DD-MMM-YYYY, hh:mm:ss A')} + + + {props.item?.usedFlows?.map((usedFlow, index) => ( + + ))} + + + + goToDataset(props.item.datasetId)} + > + + + showResults(props.item)} + > + + + +
    + {open && childSelected.length > 0 && ( + + + + + + )} + {open && ( + <> + + + + + + + + + + + Version + Last Run + Average Metrics + Status + + + + + {props.rows.length > 0 && + props.rows.map((childItem, childIndex) => ( + + + + handleSelectChild(event, childItem.id)} + /> + + {childItem.version} + + {moment(childItem.runDate).format('DD-MMM-YYYY, hh:mm:ss A')} + + + + + {childItem.average_metrics?.averageCost && ( + + )} + + {childItem.average_metrics?.passPcnt >= 0 && ( + + )} + + + + + + + showResults(childItem)} + > + + + + + + ))} + +
    +
    +
    +
    +
    + + )} +
    + ) +} +EvaluationRunRow.propTypes = { + item: PropTypes.object, + selected: PropTypes.array, + rows: PropTypes.arrayOf(PropTypes.object), + theme: PropTypes.any, + customization: PropTypes.object, + onRefresh: PropTypes.func, + handleSelect: PropTypes.func +} +export default EvalsEvaluation diff --git a/packages/ui/src/views/evaluators/AddEditEvaluatorDialog.jsx b/packages/ui/src/views/evaluators/AddEditEvaluatorDialog.jsx new file mode 100644 index 000000000..d113b9a29 --- /dev/null +++ b/packages/ui/src/views/evaluators/AddEditEvaluatorDialog.jsx @@ -0,0 +1,556 @@ +import { createPortal } from 'react-dom' +import PropTypes from 'prop-types' +import { useState, useEffect, useCallback, useMemo } from 'react' +import { useDispatch } from 'react-redux' +import { enqueueSnackbar as enqueueSnackbarAction, closeSnackbar as closeSnackbarAction } from '@/store/actions' +import { cloneDeep } from 'lodash' + +// Material +import { IconButton, Dialog, DialogActions, DialogContent, DialogTitle, Box, Typography, OutlinedInput, Button, Stack } from '@mui/material' +import { GridActionsCellItem } from '@mui/x-data-grid' + +// Project imports +import ConfirmDialog from '@/ui-component/dialog/ConfirmDialog' +import ExpandTextDialog from '@/ui-component/dialog/ExpandTextDialog' +import { StyledPermissionButton } from '@/ui-component/button/RBACButtons' +import { Dropdown } from '@/ui-component/dropdown/Dropdown' +import { TooltipWithParser } from '@/ui-component/tooltip/TooltipWithParser' +import { Grid } from '@/ui-component/grid/Grid' +import SamplePromptDialog from '@/views/evaluators/SamplePromptDialog' + +// Icons +import { IconBulb, IconArrowsMaximize, IconPlus, IconPuzzle, IconX, IconNotes } from '@tabler/icons-react' +import DeleteIcon from '@mui/icons-material/Delete' + +// API +import evaluatorsApi from '@/api/evaluators' + +// utils +import useNotifier from '@/utils/useNotifier' + +// const +import { HIDE_CANVAS_DIALOG, SHOW_CANVAS_DIALOG } from '@/store/actions' 
+import { evaluators, evaluatorTypes, numericOperators } from './evaluatorConstant' + +const AddEditEvaluatorDialog = ({ show, dialogProps, onCancel, onConfirm }) => { + const portalElement = document.getElementById('portal') + + const dispatch = useDispatch() + + // ==============================|| Snackbar ||============================== // + + useNotifier() + + const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) + const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) + + const [name, setName] = useState('') + const [evaluatorType, setEvaluatorType] = useState('') + const [availableEvaluators, setAvailableEvaluators] = useState([]) + const [selectedEvaluator, setSelectedEvaluator] = useState() + const [selectedValue, setSelectedValue] = useState('') + const [selectedMetricValue, setSelectedMetricValue] = useState('0') + const [selectedMetricOperator, setSelectedMetricOperator] = useState('equals') + + const [showExpandDialog, setShowExpandDialog] = useState(false) + const [expandDialogProps, setExpandDialogProps] = useState({}) + + const [showSamplePromptDialog, setShowSamplePromptDialog] = useState(false) + const [samplePromptDialogProps, setSamplePromptDialogProps] = useState({}) + + const [outputSchema, setOutputSchema] = useState([]) + const [prompt, setPrompt] = useState('') + + const deleteItem = useCallback( + (id) => () => { + setTimeout(() => { + setOutputSchema((prevRows) => prevRows.filter((row) => row.id !== id)) + }) + }, + [] + ) + + const onSamplePromptSelected = (data) => { + setPrompt(data.prompt) + setOutputSchema(data.json) + setShowSamplePromptDialog(false) + } + + const onShowPromptDialogClicked = (inputParam) => { + const dialogProps = { + value: prompt, + inputParam, + confirmButtonName: 'Save', + cancelButtonName: 'Cancel' + } + setSamplePromptDialogProps(dialogProps) + setShowSamplePromptDialog(true) + } + const onExpandDialogClicked = (inputParam) => { + const dialogProps = { + value: 
prompt, + inputParam, + confirmButtonName: 'Save', + cancelButtonName: 'Cancel' + } + setExpandDialogProps(dialogProps) + setShowExpandDialog(true) + } + + const onExpandDialogSave = (newValue) => { + setShowExpandDialog(false) + setPrompt(newValue) + } + + const addNewRow = () => { + setTimeout(() => { + setOutputSchema((prevRows) => { + let allRows = [...cloneDeep(prevRows)] + const lastRowId = allRows.length ? allRows[allRows.length - 1].id + 1 : 1 + allRows.push({ + id: lastRowId, + property: '', + description: '', + type: '', + required: false + }) + return allRows + }) + }) + } + + const onRowUpdate = (newRow) => { + setTimeout(() => { + setOutputSchema((prevRows) => { + let allRows = [...cloneDeep(prevRows)] + const indexToUpdate = allRows.findIndex((row) => row.id === newRow.id) + if (indexToUpdate >= 0) { + allRows[indexToUpdate] = { ...newRow } + } + return allRows + }) + }) + } + + const columns = useMemo( + () => [ + { field: 'property', headerName: 'Property', editable: true, flex: 1 }, + { + field: 'type', + headerName: 'Type', + type: 'singleSelect', + valueOptions: ['string', 'number', 'boolean'], + editable: true, + width: 120 + }, + { field: 'description', headerName: 'Description', editable: true, flex: 1 }, + { field: 'required', headerName: 'Required', type: 'boolean', editable: true, width: 80 }, + { + field: 'actions', + type: 'actions', + width: 80, + getActions: (params) => [ + } label='Delete' onClick={deleteItem(params.id)} /> + ] + } + ], + [deleteItem] + ) + + const onEvaluatorTypeChange = (type) => { + setEvaluatorType(type) + setAvailableEvaluators(evaluators.filter((item) => item.type === type)) + setSelectedEvaluator('') + setSelectedValue('') + } + + const getCaption = () => { + if (selectedEvaluator) { + // return the description of the selected evaluator + const e = availableEvaluators.find((item) => item.name === selectedEvaluator) + if (e) { + return e.description + } + } + return '' + } + + const disableButton = () => { + if 
(!name || !evaluatorType) { + return true + } + if (evaluatorType === 'text') { + return !selectedEvaluator || !selectedValue + } else if (evaluatorType === 'numeric') { + return !selectedEvaluator || !selectedMetricOperator || !selectedMetricValue + } else if (evaluatorType === 'llm') { + return !prompt || outputSchema.length === 0 + } + } + + const updateEvaluator = async () => { + try { + const data = prepareData() + + const updateResp = await evaluatorsApi.updateEvaluator(dialogProps.data.id, data) + if (updateResp.data) { + enqueueSnackbar({ + message: `Evaluator ${name} updated`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + onConfirm(updateResp.data.id) + } + } catch (error) { + enqueueSnackbar({ + message: `Failed to update Evaluator ${name}: ${ + typeof error.response.data === 'object' ? error.response.data.message : error.response.data + }`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + onCancel() + } + } + + const prepareData = () => { + const data = { + name: name, + type: evaluatorType + } + if (evaluatorType === 'numeric') { + data.operator = selectedMetricOperator + data.value = selectedMetricValue + data.measure = selectedEvaluator + } else if (evaluatorType === 'text' || evaluatorType === 'json') { + data.operator = selectedEvaluator + data.value = selectedValue + } else if (evaluatorType === 'llm') { + data.outputSchema = outputSchema + data.prompt = prompt + } + return data + } + + const addEvaluator = async () => { + try { + const data = prepareData() + + const createResp = await evaluatorsApi.createEvaluator(data) + if (createResp.data) { + enqueueSnackbar({ + message: 'New Evaluator added', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + onConfirm(createResp.data.id) + } + } catch (error) { + enqueueSnackbar({ + 
message: `Failed to add new Evaluator: ${ + typeof error.response.data === 'object' ? error.response.data.message : error.response.data + }`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + onCancel() + } + } + + useEffect(() => { + if (dialogProps.data && dialogProps.type === 'EDIT') { + const data = dialogProps.data + onEvaluatorTypeChange(data.type) + setName(data.name) + + if ('text' === data.type || 'json' === data.type) { + setSelectedEvaluator(data.operator) + setSelectedValue(data.value) + } else if ('numeric' === data.type) { + setSelectedValue(data.measure) + setSelectedMetricValue(data.value) + setSelectedMetricOperator(data.operator) + setSelectedEvaluator(data.measure) + } else if ('llm' === data.type) { + setPrompt(data.prompt) + setOutputSchema(data.outputSchema) + } + } else if (dialogProps.data && dialogProps.type === 'ADD') { + const data = dialogProps.data + onEvaluatorTypeChange(data.type) + setName(data.name) + setOutputSchema([]) + } + + return () => { + // reset all values + setName('') + setEvaluatorType('') + setAvailableEvaluators([]) + setSelectedEvaluator('') + setSelectedValue('') + setSelectedMetricValue('0') + setSelectedMetricOperator('equals') + setOutputSchema([]) + } + }, [dialogProps]) + + useEffect(() => { + if (show) dispatch({ type: SHOW_CANVAS_DIALOG }) + else dispatch({ type: HIDE_CANVAS_DIALOG }) + return () => dispatch({ type: HIDE_CANVAS_DIALOG }) + }, [show, dispatch]) + + const component = show ? ( + + +
    + + {dialogProps.type === 'ADD' ? 'Add Evaluator' : 'Edit Evaluator'} +
    +
    + + + Name + setName(e.target.value)} + value={name ?? ''} + /> + + + Evaluator Type + onEvaluatorTypeChange(newValue)} + value={evaluatorType} + /> + + {evaluatorType && evaluatorType !== 'llm' && ( + + Available Evaluators + setSelectedEvaluator(e)} + value={selectedEvaluator} + /> + + )} + {evaluatorType === 'numeric' && selectedEvaluator && ( + <> + + Select Operator + setSelectedMetricOperator(e)} + value={selectedMetricOperator} + /> + + + Value + setSelectedMetricValue(e.target.value)} + value={selectedMetricValue ?? '0'} + /> + + {getCaption()} + + + + )} + {evaluatorType === 'text' && selectedEvaluator && ( + <> + + Value + setSelectedValue(e.target.value)} + value={selectedValue} + sx={{ mb: 2 }} + /> + + {getCaption()} + + + + )} + {evaluatorType === 'llm' && ( + <> + + + + Output Schema + + + + + + + + + + +
    + Prompt +
    + {prompt && ( + + onExpandDialogClicked({ + label: 'Evaluation Prompt', + name: 'evaluationPrompt', + type: 'string' + }) + } + > + + + )} +
    + setPrompt(e.target.value)} + value={prompt} + /> +
    +
    + + + You can use {question} {actualOutput}{' '} + {expectedOutput} to inject runtime values into your prompt. + +
    +
    +
    + + )} +
    + + + (dialogProps.type === 'ADD' ? addEvaluator() : updateEvaluator())} + > + {dialogProps.confirmButtonName} + + + + setShowExpandDialog(false)} + onConfirm={(newValue) => onExpandDialogSave(newValue)} + > + setShowSamplePromptDialog(false)} + onConfirm={(newValue) => onSamplePromptSelected(newValue)} + > +
    + ) : null + + return createPortal(component, portalElement) +} + +AddEditEvaluatorDialog.propTypes = { + show: PropTypes.bool, + dialogProps: PropTypes.object, + onCancel: PropTypes.func, + onConfirm: PropTypes.func +} + +export default AddEditEvaluatorDialog diff --git a/packages/ui/src/views/evaluators/SamplePromptDialog.jsx b/packages/ui/src/views/evaluators/SamplePromptDialog.jsx new file mode 100644 index 000000000..e71c60683 --- /dev/null +++ b/packages/ui/src/views/evaluators/SamplePromptDialog.jsx @@ -0,0 +1,173 @@ +import { createPortal } from 'react-dom' +import PropTypes from 'prop-types' +import { useState, useEffect, useMemo } from 'react' + +// Material +import { Dialog, DialogActions, DialogContent, DialogTitle, Box, Typography, Divider, Stack, OutlinedInput, Button } from '@mui/material' + +// Project imports +import { StyledButton } from '@/ui-component/button/StyledButton' +import { Dropdown } from '@/ui-component/dropdown/Dropdown' +import { TooltipWithParser } from '@/ui-component/tooltip/TooltipWithParser' +import { Grid } from '@/ui-component/grid/Grid' + +// Icons +import { IconTestPipe2 } from '@tabler/icons-react' + +// utils +import useNotifier from '@/utils/useNotifier' + +// const +import { evaluationPrompts } from '@/views/evaluators/evaluationPrompts' + +const SamplePromptDialog = ({ show, dialogProps, onCancel, onConfirm }) => { + const portalElement = document.getElementById('portal') + useNotifier() + + const [selectedPromptName, setSelectedPromptName] = useState('') + const [selectedConfig, setSelectedConfig] = useState([]) + const [selectedPromptText, setSelectedPromptText] = useState('') + + useEffect(() => { + resetData() + return () => { + resetData() + } + }, [dialogProps]) + + const resetData = () => { + setSelectedPromptName('') + setSelectedConfig([]) + setSelectedPromptText('') + } + + const onSelected = async (selectedPromptName) => { + if (selectedPromptName) { + const selected = evaluationPrompts.find((prompt) => 
prompt.name === selectedPromptName) + setSelectedConfig(selected.json) + setSelectedPromptText(selected.prompt) + setSelectedPromptName(selected.name) + } else { + setSelectedPromptName('') + setSelectedConfig([]) + setSelectedPromptText('') + } + } + + const onConfirmPrompt = async () => { + const selected = evaluationPrompts.find((prompt) => prompt.name === selectedPromptName) + onConfirm(selected) + } + + const disableButton = () => { + return !selectedPromptName || !selectedPromptText + } + + const columns = useMemo( + () => [ + { field: 'property', headerName: 'Property', flex: 1 }, + { + field: 'type', + headerName: 'Type', + type: 'singleSelect', + valueOptions: ['string', 'number', 'boolean'], + width: 120 + }, + { field: 'description', headerName: 'Description', flex: 1 }, + { field: 'required', headerName: 'Required', type: 'boolean', width: 80 }, + { + field: 'actions', + type: 'actions', + width: 80, + getActions: () => [] + } + ], + [] + ) + + const component = show ? ( + + +
    + + Sample Prompts +
    +
    + + + + + + Available Prompts * + + + + {selectedPromptName && ( + + + + Output Schema + + + + + + )} + {selectedPromptName && ( + +
    + Prompt +
    + setSelectedPromptText(e.target.value)} + value={selectedPromptText} + /> +
    + )} +
    +
    + + + onConfirmPrompt()} + > + {'Select Prompt'} + + +
    + ) : null + + return createPortal(component, portalElement) +} + +SamplePromptDialog.propTypes = { + show: PropTypes.bool, + dialogProps: PropTypes.object, + onCancel: PropTypes.func, + onConfirm: PropTypes.func +} + +export default SamplePromptDialog diff --git a/packages/ui/src/views/evaluators/evaluationPrompts.js b/packages/ui/src/views/evaluators/evaluationPrompts.js new file mode 100644 index 000000000..e90263875 --- /dev/null +++ b/packages/ui/src/views/evaluators/evaluationPrompts.js @@ -0,0 +1,26 @@ +export const evaluationPrompts = [ + { + name: 'correctness', + label: 'Correctness', + json: [{ id: 1, property: 'score', description: 'graded score', type: 'number', required: true }], + prompt: `Respond with a numeric score based on how well the following response compare to the ground truth. Grade only based expected response: + +Ground Truth: {expectedOutput} + +DATA: +--------- +Response: {actualOutput} +--------- + +Do not include any other information in your response. Do not evaluate correctness to the question, only match it to the reference. It is very critical that you answer only with a numeric score. Is the assistants answer grounded in and similar to the ground truth answer? A score of 1 means that the assistant answer is not at all grounded in the ground truth answer, while a score of 5 means that the assistant answer contains some information that is grounded in and similar to the ground truth answer. A score of 10 means that the assistant answer is fully ground and similar to the ground truth answer. Please provide a score between 1 and 10. 
Do not generate any newlines in the response.` + }, + { + name: 'hallucination', + label: 'Hallucination', + json: [ + { id: 1, property: 'score', description: 'provide a score between 0 and 1', type: 'number', required: true }, + { id: 2, property: 'reasoning', description: 'provide a one sentence reasoning', type: 'string', required: true } + ], + prompt: `Evaluate the degree of hallucination in the generation on a continuous scale from 0 to 1. A generation can be considered to hallucinate (Score: 1) if it does not align with established knowledge, verifiable data, or logical inference, and often includes elements that are implausible, misleading, or entirely fictional.\n\nExample:\nQuery: Can eating carrots improve your vision?\nGeneration: Yes, eating carrots significantly improves your vision, especially at night. This is why people who eat lots of carrots never need glasses. Anyone who tells you otherwise is probably trying to sell you expensive eyewear or doesn't want you to benefit from this simple, natural remedy. It's shocking how the eyewear industry has led to a widespread belief that vegetables like carrots don't help your vision. People are so gullible to fall for these money-making schemes.\n\nScore: 1.0\nReasoning: Carrots only improve vision under specific circumstances, namely a lack of vitamin A that leads to decreased vision. Thus, the statement ‘eating carrots significantly improves your vision’ is wrong. Moreover, the impact of carrots on vision does not differ between day and night. So also the clause ‘especially at night’ is wrong. 
Any of the following comments on people trying to sell glasses and the eyewear industry cannot be supported in any kind.\n\nInput:\nQuery: {question}\nGeneration: {actualOutput}\n\nThink step by step.` + } +] diff --git a/packages/ui/src/views/evaluators/evaluatorConstant.js b/packages/ui/src/views/evaluators/evaluatorConstant.js new file mode 100644 index 000000000..79272cf0f --- /dev/null +++ b/packages/ui/src/views/evaluators/evaluatorConstant.js @@ -0,0 +1,143 @@ +// TODO: Move this to a config file +export const evaluators = [ + { + type: 'text', + name: 'ContainsAny', + label: 'Contains Any', + description: 'Returns true if any of the specified comma separated values are present in the response.' + }, + { + type: 'text', + name: 'ContainsAll', + label: 'Contains All', + description: 'Returns true if ALL of the specified comma separated values are present in the response.' + }, + { + type: 'text', + name: 'DoesNotContainAny', + label: 'Does Not Contain Any', + description: 'Returns true if none of the specified comma separated values are present in the response.' + }, + { + type: 'text', + name: 'DoesNotContainAll', + label: 'Does Not Contain All', + description: 'Returns true if not all of the specified comma separated values are present in the response.' + }, + { + type: 'text', + name: 'StartsWith', + label: 'Starts With', + description: 'Returns true if the response starts with the specified value.' + }, + { + type: 'text', + name: 'NotStartsWith', + label: 'Does Not Start With', + description: 'Returns true if the response does not start with the specified value.' + }, + { + type: 'json', + name: 'IsValidJSON', + label: 'Is Valid JSON', + description: 'Returns true if the response is a valid JSON.' + }, + { + type: 'json', + name: 'IsNotValidJSON', + label: 'Is Not a Valid JSON', + description: 'Returns true if the response is not a valid JSON.' 
+ }, + { + type: 'numeric', + name: 'totalTokens', + label: 'Total Tokens', + description: 'Sum of Prompt Tokens and Completion Tokens.' + }, + { + type: 'numeric', + label: 'Prompt Tokens', + name: 'promptTokens', + description: 'This is the number of tokens in your prompt.' + }, + { + type: 'numeric', + label: 'Completion Tokens', + name: 'completionTokens', + description: 'Completion tokens are any tokens that the model generates in response to your input.' + }, + { + type: 'numeric', + label: 'Total API Latency', + name: 'apiLatency', + description: 'Total time taken for the Flowise Prediction API call (milliseconds).' + }, + { + type: 'numeric', + label: 'LLM Latency', + name: 'llm', + description: 'Actual LLM invocation time (milliseconds).' + }, + { + type: 'numeric', + label: 'Chatflow Latency', + name: 'chain', + description: 'Actual time spent in executing the chatflow (milliseconds).' + }, + { + type: 'numeric', + label: 'Output Chars Length', + name: 'responseLength', + description: 'Number of characters in the response.' + } +] + +export const evaluatorTypes = [ + { + label: 'Evaluate Result (Text Based)', + name: 'text', + description: 'Set of Evaluators to evaluate the result of a Chatflow.' + }, + { + label: 'Evaluate Result (JSON)', + name: 'json', + description: 'Set of Evaluators to evaluate the JSON response of a Chatflow.' + }, + { + label: 'Evaluate Metrics (Numeric)', + name: 'numeric', + description: 'Set of Evaluators that evaluate the metrics (latency, tokens, cost, length of response) of a Chatflow.' + }, + { + label: 'LLM based Grading (JSON)', + name: 'llm', + description: 'Post execution, grades the answers by using an LLM.' 
+ } +] + +export const numericOperators = [ + { + label: 'Equals', + name: 'equals' + }, + { + label: 'Not Equals', + name: 'notEquals' + }, + { + label: 'Greater Than', + name: 'greaterThan' + }, + { + label: 'Less Than', + name: 'lessThan' + }, + { + label: 'Greater Than or Equals', + name: 'greaterThanOrEquals' + }, + { + label: 'Less Than or Equals', + name: 'lessThanOrEquals' + } +] diff --git a/packages/ui/src/views/evaluators/index.jsx b/packages/ui/src/views/evaluators/index.jsx new file mode 100644 index 000000000..f1738f923 --- /dev/null +++ b/packages/ui/src/views/evaluators/index.jsx @@ -0,0 +1,575 @@ +import { useEffect, useState } from 'react' +import { useDispatch, useSelector } from 'react-redux' + +// material-ui +import { Chip, Skeleton, Box, Stack, TableContainer, Paper, Table, TableHead, TableRow, TableCell, TableBody, Button } from '@mui/material' +import { useTheme } from '@mui/material/styles' + +// project imports +import { closeSnackbar as closeSnackbarAction, enqueueSnackbar as enqueueSnackbarAction } from '@/store/actions' +import MainCard from '@/ui-component/cards/MainCard' +import ViewHeader from '@/layout/MainLayout/ViewHeader' +import ErrorBoundary from '@/ErrorBoundary' +import ConfirmDialog from '@/ui-component/dialog/ConfirmDialog' +import AddEditEvaluatorDialog from '@/views/evaluators/AddEditEvaluatorDialog' +import { StyledTableCell, StyledTableRow } from '@/ui-component/table/TableStyles' +import { PermissionIconButton, StyledPermissionButton } from '@/ui-component/button/RBACButtons' +import TablePagination, { DEFAULT_ITEMS_PER_PAGE } from '@/ui-component/pagination/TablePagination' +import { truncateString } from '@/utils/genericHelper' + +// API +import evaluatorsApi from '@/api/evaluators' +import moment from 'moment/moment' + +// Hooks +import useNotifier from '@/utils/useNotifier' +import useConfirm from '@/hooks/useConfirm' +import useApi from '@/hooks/useApi' +import { useError } from '@/store/context/ErrorContext' + 
+// icons +import empty_evaluatorSVG from '@/assets/images/empty_evaluators.svg' +import { IconTrash, IconPlus, IconJson, IconX, IconNumber123, IconAbc, IconAugmentedReality } from '@tabler/icons-react' + +// const +import { evaluators as evaluatorsOptions, numericOperators } from '../evaluators/evaluatorConstant' + +// ==============================|| Evaluators ||============================== // + +const Evaluators = () => { + const theme = useTheme() + const customization = useSelector((state) => state.customization) + const dispatch = useDispatch() + const { confirm } = useConfirm() + useNotifier() + const { error } = useError() + + const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) + const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) + + const [search, setSearch] = useState('') + const [isLoading, setLoading] = useState(true) + const [showEvaluatorDialog, setShowEvaluatorDialog] = useState(false) + const [dialogProps, setDialogProps] = useState({}) + const [evaluators, setEvaluators] = useState([]) + + const getAllEvaluators = useApi(evaluatorsApi.getAllEvaluators) + + /* Table Pagination */ + const [currentPage, setCurrentPage] = useState(1) + const [pageLimit, setPageLimit] = useState(DEFAULT_ITEMS_PER_PAGE) + const [total, setTotal] = useState(0) + const onChange = (page, pageLimit) => { + setCurrentPage(page) + setPageLimit(pageLimit) + refresh(page, pageLimit) + } + + const refresh = (page, limit) => { + const params = { + page: page || currentPage, + limit: limit || pageLimit + } + getAllEvaluators.request(params) + } + + const onSearchChange = (event) => { + setSearch(event.target.value) + } + + const newEvaluator = () => { + const dialogProp = { + type: 'ADD', + cancelButtonName: 'Cancel', + confirmButtonName: 'Add', + data: {} + } + setDialogProps(dialogProp) + setShowEvaluatorDialog(true) + } + + const edit = (item) => { + const dialogProp = { + type: 'EDIT', + cancelButtonName: 'Cancel', + 
confirmButtonName: 'Save', + data: item + } + setDialogProps(dialogProp) + setShowEvaluatorDialog(true) + } + + const deleteEvaluator = async (item) => { + const confirmPayload = { + title: `Delete`, + description: `Delete Evaluator ${item.name}?`, + confirmButtonName: 'Delete', + cancelButtonName: 'Cancel' + } + const isConfirmed = await confirm(confirmPayload) + + if (isConfirmed) { + try { + const deleteResp = await evaluatorsApi.deleteEvaluator(item.id) + if (deleteResp.data) { + enqueueSnackbar({ + message: 'Evaluator deleted', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + onConfirm() + } + } catch (error) { + enqueueSnackbar({ + message: `Failed to delete Evaluator: ${ + typeof error.response.data === 'object' ? error.response.data.message : error.response.data + }`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + } + } + } + + const onConfirm = () => { + setShowEvaluatorDialog(false) + refresh(currentPage, pageLimit) + } + + function filterDatasets(data) { + return data.name.toLowerCase().indexOf(search.toLowerCase()) > -1 + } + + useEffect(() => { + refresh(currentPage, pageLimit) + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []) + + useEffect(() => { + if (getAllEvaluators.data) { + setEvaluators(getAllEvaluators.data.data) + setTotal(getAllEvaluators.data.total) + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getAllEvaluators.data]) + + useEffect(() => { + setLoading(getAllEvaluators.loading) + }, [getAllEvaluators.loading]) + + return ( + <> + + {error ? ( + + ) : ( + + + } + > + New Evaluator + + + {!isLoading && evaluators.length <= 0 ? ( + + + empty_evaluatorSVG + +
    No Evaluators Yet
    +
    + ) : ( + <> + + + + + Type + Name + Details + Last Updated + + + + + {isLoading ? ( + <> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ) : ( + <> + {evaluators.filter(filterDatasets).map((ds, index) => ( + <> + + edit(ds)}> + {ds?.type === 'numeric' && ( + + } + label='Numeric' + variant='outlined' + /> + + )} + {ds?.type === 'text' && ( + + } + label='Text Based' + variant='outlined' + /> + + )} + {ds?.type === 'json' && ( + + } + label='JSON Based' + variant='outlined' + /> + + )} + {ds?.type === 'llm' && ( + + } + label='LLM Based' + variant='outlined' + /> + + )} + + edit(ds)} component='th' scope='row'> + {ds.name} + + edit(ds)}> + {ds?.type === 'numeric' && ( + + + Measure:{' '} + { + [ + ...evaluatorsOptions, + ...numericOperators + ].find((item) => item.name === ds?.measure) + ?.label + } + + } + /> + + Operator:{' '} + { + [ + ...evaluatorsOptions, + ...numericOperators + ].find((item) => item.name === ds?.operator) + ?.label + } + + } + /> + + Value: {ds?.value} + + } + /> + + )} + {ds?.type === 'text' && ( + + + Operator:{' '} + { + [ + ...evaluatorsOptions, + ...numericOperators + ].find((item) => item.name === ds?.operator) + ?.label + } + + } + /> + + Value: {ds?.value} + + } + /> + + )} + {ds?.type === 'json' && ( + + + Operator:{' '} + { + [...evaluatorsOptions].find( + (item) => item.name === ds?.operator + )?.label + } + + } + /> + + )} + {ds?.type === 'llm' && ( + + + Prompt: {truncateString(ds?.prompt, 100)} + + } + /> + + Output Schema Elements:{' '} + {ds?.outputSchema.length > 0 + ? ds?.outputSchema + .map((item) => item.property) + .join(', ') + : 'None'} + + } + /> + + )} + + edit(ds)}> + {moment(ds.updatedDate).format('MMMM Do YYYY, hh:mm A')} + + + deleteEvaluator(ds)} + > + + + + + + ))} + + )} + +
    +
    + {/* Pagination and Page Size Controls */} + + + )} +
    + )} +
    + {showEvaluatorDialog && ( + setShowEvaluatorDialog(false)} + onConfirm={onConfirm} + > + )} + + + ) +} + +export default Evaluators diff --git a/packages/ui/src/views/files/index.jsx b/packages/ui/src/views/files/index.jsx new file mode 100644 index 000000000..e5b952c83 --- /dev/null +++ b/packages/ui/src/views/files/index.jsx @@ -0,0 +1,152 @@ +import { useEffect, useState } from 'react' + +// material-ui +import { Box, Button, Stack } from '@mui/material' + +// project imports +import MainCard from '@/ui-component/cards/MainCard' +import WorkflowEmptySVG from '@/assets/images/workflow_empty.svg' +import ViewHeader from '@/layout/MainLayout/ViewHeader' +import ErrorBoundary from '@/ErrorBoundary' +import { FilesTable } from '@/ui-component/table/FilesTable' +import useConfirm from '@/hooks/useConfirm' +import useNotifier from '@/utils/useNotifier' +import { closeSnackbar as closeSnackbarAction, enqueueSnackbar as enqueueSnackbarAction } from '@/store/actions' +import ConfirmDialog from '@/ui-component/dialog/ConfirmDialog' + +// API +import filesApi from '@/api/files' + +// Hooks +import useApi from '@/hooks/useApi' + +// icons +import { IconX } from '@tabler/icons-react' +import { useDispatch } from 'react-redux' +import { useError } from '@/store/context/ErrorContext' + +// ==============================|| CHATFLOWS ||============================== // + +const Files = () => { + const { confirm } = useConfirm() + + const [isLoading, setLoading] = useState(true) + const { error, setError } = useError() + const [files, setFiles] = useState([]) + const [search, setSearch] = useState('') + + const getAllFilesApi = useApi(filesApi.getAllFiles) + + const dispatch = useDispatch() + + useNotifier() + const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) + const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) + + const onSearchChange = (event) => { + setSearch(event.target.value) + } + + function filterFiles(data) { + 
return ( + data.name.toLowerCase().indexOf(search.toLowerCase()) > -1 || + (data.category && data.category.toLowerCase().indexOf(search.toLowerCase()) > -1) + ) + } + + const handleDeleteFile = async (file) => { + const confirmPayload = { + title: `Delete`, + description: `Delete ${file.name}? This process cannot be undone.`, + confirmButtonName: 'Delete', + cancelButtonName: 'Cancel' + } + const isConfirmed = await confirm(confirmPayload) + + if (isConfirmed) { + try { + const deleteResponse = await filesApi.deleteFile(file.path) + if (deleteResponse?.data) { + enqueueSnackbar({ + message: 'File deleted', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + } + await getAllFilesApi.request() + } catch (error) { + setError(error) + enqueueSnackbar({ + message: typeof error.response.data === 'object' ? error.response.data.message : error.response.data, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + } + } + } + + useEffect(() => { + getAllFilesApi.request() + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []) + + useEffect(() => { + setLoading(getAllFilesApi.loading) + }, [getAllFilesApi.loading]) + + useEffect(() => { + if (getAllFilesApi.data) { + try { + const files = getAllFilesApi.data + setFiles(files) + } catch (e) { + console.error(e) + } + } + }, [getAllFilesApi.data]) + + return ( + + {error ? ( + + ) : ( + + + + {!isLoading && (!getAllFilesApi.data || getAllFilesApi.data.length === 0) && ( + + + WorkflowEmptySVG + +
    No Files Yet
    +
    + )} +
    + )} + + +
    + ) +} + +export default Files diff --git a/packages/ui/src/views/marketplaces/MarketplaceCanvas.jsx b/packages/ui/src/views/marketplaces/MarketplaceCanvas.jsx index 0c3040538..b1b9c2b11 100644 --- a/packages/ui/src/views/marketplaces/MarketplaceCanvas.jsx +++ b/packages/ui/src/views/marketplaces/MarketplaceCanvas.jsx @@ -1,9 +1,10 @@ -import { useEffect, useRef } from 'react' +import { useEffect, useRef, useState } from 'react' import ReactFlow, { Controls, Background, useNodesState, useEdgesState } from 'reactflow' import 'reactflow/dist/style.css' import '@/views/canvas/index.css' import { useLocation, useNavigate } from 'react-router-dom' +import { useSelector } from 'react-redux' // material-ui import { Toolbar, Box, AppBar } from '@mui/material' @@ -14,6 +15,9 @@ import MarketplaceCanvasNode from './MarketplaceCanvasNode' import MarketplaceCanvasHeader from './MarketplaceCanvasHeader' import StickyNote from '../canvas/StickyNote' +// icons +import { IconMagnetFilled, IconMagnetOff, IconArtboard, IconArtboardOff } from '@tabler/icons-react' + const nodeTypes = { customNode: MarketplaceCanvasNode, stickyNote: StickyNote } const edgeTypes = { buttonedge: '' } @@ -22,6 +26,7 @@ const edgeTypes = { buttonedge: '' } const MarketplaceCanvas = () => { const theme = useTheme() const navigate = useNavigate() + const customization = useSelector((state) => state.customization) const { state } = useLocation() const { flowData, name } = state @@ -30,6 +35,8 @@ const MarketplaceCanvas = () => { const [nodes, setNodes, onNodesChange] = useNodesState() const [edges, setEdges, onEdgesChange] = useEdgesState() + const [isSnappingEnabled, setIsSnappingEnabled] = useState(false) + const [isBackgroundEnabled, setIsBackgroundEnabled] = useState(true) const reactFlowWrapper = useRef(null) @@ -86,16 +93,40 @@ const MarketplaceCanvas = () => { edgeTypes={edgeTypes} fitView minZoom={0.1} + snapGrid={[25, 25]} + snapToGrid={isSnappingEnabled} > - + > + + + + {isBackgroundEnabled && }
    diff --git a/packages/ui/src/views/marketplaces/MarketplaceCanvasHeader.jsx b/packages/ui/src/views/marketplaces/MarketplaceCanvasHeader.jsx index 3fbe78f27..7e8e03f0c 100644 --- a/packages/ui/src/views/marketplaces/MarketplaceCanvasHeader.jsx +++ b/packages/ui/src/views/marketplaces/MarketplaceCanvasHeader.jsx @@ -8,6 +8,7 @@ import { StyledButton } from '@/ui-component/button/StyledButton' // icons import { IconCopy, IconChevronLeft } from '@tabler/icons-react' +import { Available } from '@/ui-component/rbac/available' // ==============================|| CANVAS HEADER ||============================== // @@ -52,17 +53,19 @@ const MarketplaceCanvasHeader = ({ flowName, flowData, onChatflowCopy }) => { - - onChatflowCopy(flowData)} - startIcon={} - > - Use Template - - + + + onChatflowCopy(flowData)} + startIcon={} + > + Use Template + + + ) } diff --git a/packages/ui/src/views/marketplaces/index.jsx b/packages/ui/src/views/marketplaces/index.jsx index 52e87994c..1f3647614 100644 --- a/packages/ui/src/views/marketplaces/index.jsx +++ b/packages/ui/src/views/marketplaces/index.jsx @@ -21,7 +21,10 @@ import { MenuItem, Button, Tabs, - Tab + Autocomplete, + TextField, + Chip, + Tooltip } from '@mui/material' import { useTheme } from '@mui/material/styles' import { IconLayoutGrid, IconList, IconX } from '@tabler/icons-react' @@ -37,6 +40,9 @@ import ErrorBoundary from '@/ErrorBoundary' import { TabPanel } from '@/ui-component/tabs/TabPanel' import { closeSnackbar as closeSnackbarAction, enqueueSnackbar as enqueueSnackbarAction } from '@/store/actions' import ConfirmDialog from '@/ui-component/dialog/ConfirmDialog' +import { PermissionTab } from '@/ui-component/button/RBACButtons' +import { Available } from '@/ui-component/rbac/available' +import ShareWithWorkspaceDialog from '@/ui-component/dialog/ShareWithWorkspaceDialog' // API import marketplacesApi from '@/api/marketplaces' @@ -44,14 +50,18 @@ import marketplacesApi from '@/api/marketplaces' // Hooks import 
useApi from '@/hooks/useApi' import useConfirm from '@/hooks/useConfirm' +import { useAuth } from '@/hooks/useAuth' + +// Utils +import useNotifier from '@/utils/useNotifier' // const import { baseURL, AGENTFLOW_ICONS } from '@/store/constant' import { gridSpacing } from '@/store/constant' -import useNotifier from '@/utils/useNotifier' +import { useError } from '@/store/context/ErrorContext' const badges = ['POPULAR', 'NEW'] -const types = ['Chatflow', 'Agentflow', 'AgentflowV2', 'Tool'] +const types = ['Chatflow', 'AgentflowV2', 'Tool'] const framework = ['Langchain', 'LlamaIndex'] const MenuProps = { PaperProps: { @@ -69,9 +79,9 @@ const Marketplace = () => { useNotifier() const theme = useTheme() + const { error, setError } = useError() const [isLoading, setLoading] = useState(true) - const [error, setError] = useState(null) const [images, setImages] = useState({}) const [icons, setIcons] = useState({}) const [usecases, setUsecases] = useState([]) @@ -99,6 +109,26 @@ const Marketplace = () => { const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) const { confirm } = useConfirm() + const { hasPermission } = useAuth() + + const [showShareTemplateDialog, setShowShareTemplateDialog] = useState(false) + const [shareTemplateDialogProps, setShareTemplateDialogProps] = useState({}) + + const share = (template) => { + const dialogProps = { + type: 'EDIT', + cancelButtonName: 'Cancel', + confirmButtonName: 'Share', + data: { + id: template.id, + name: template.name, + title: 'Share Custom Template', + itemType: 'custom_template' + } + } + setShareTemplateDialogProps(dialogProps) + setShowShareTemplateDialog(true) + } const getSelectStyles = (borderColor, isDarkMode) => ({ '& .MuiOutlinedInput-notchedOutline': { @@ -320,8 +350,12 @@ const Marketplace = () => { } useEffect(() => { - getAllTemplatesMarketplacesApi.request() - + if (hasPermission('templates:marketplace')) { + 
getAllTemplatesMarketplacesApi.request() + } else if (hasPermission('templates:custom')) { + setActiveTabValue(1) + getAllCustomTemplatesApi.request() + } // eslint-disable-next-line react-hooks/exhaustive-deps }, []) @@ -345,13 +379,17 @@ const Marketplace = () => { images[flows[i].id] = [] icons[flows[i].id] = [] for (let j = 0; j < nodes.length; j += 1) { + if (nodes[j].data.name === 'stickyNote' || nodes[j].data.name === 'stickyNoteAgentflow') continue const foundIcon = AGENTFLOW_ICONS.find((icon) => icon.name === nodes[j].data.name) if (foundIcon) { icons[flows[i].id].push(foundIcon) } else { const imageSrc = `${baseURL}/api/v1/node-icon/${nodes[j].data.name}` - if (!images[flows[i].id].includes(imageSrc)) { - images[flows[i].id].push(imageSrc) + if (!images[flows[i].id].some((img) => img.imageSrc === imageSrc)) { + images[flows[i].id].push({ + imageSrc, + label: nodes[j].data.name + }) } } } @@ -368,9 +406,10 @@ const Marketplace = () => { }, [getAllTemplatesMarketplacesApi.data]) useEffect(() => { - if (getAllTemplatesMarketplacesApi.error) { + if (getAllTemplatesMarketplacesApi.error && setError) { setError(getAllTemplatesMarketplacesApi.error) } + // eslint-disable-next-line react-hooks/exhaustive-deps }, [getAllTemplatesMarketplacesApi.error]) useEffect(() => { @@ -420,9 +459,10 @@ const Marketplace = () => { }, [getAllCustomTemplatesApi.data]) useEffect(() => { - if (getAllCustomTemplatesApi.error) { + if (getAllCustomTemplatesApi.error && setError) { setError(getAllCustomTemplatesApi.error) } + // eslint-disable-next-line react-hooks/exhaustive-deps }, [getAllCustomTemplatesApi.error]) return ( @@ -584,76 +624,134 @@ const Marketplace = () => { - - - - - - - {usecases.map((usecase, index) => ( - { - setSelectedUsecases( - event.target.checked - ? 
[...selectedUsecases, usecase] - : selectedUsecases.filter((item) => item !== usecase) - ) - }} - /> - } - label={usecase} - /> - ))} - - {selectedUsecases.length > 0 && ( - - )} + {hasPermission('templates:marketplace') && hasPermission('templates:custom') && ( + + + + + + setSelectedUsecases(newValue)} + disableCloseOnSelect + getOptionLabel={(option) => option} + isOptionEqualToValue={(option, value) => option === value} + renderOption={(props, option, { selected }) => { + const isDisabled = eligibleUsecases.length > 0 && !eligibleUsecases.includes(option) - {!view || view === 'card' ? ( - <> - {isLoading ? ( - - - - - - ) : ( - - {getAllTemplatesMarketplacesApi.data - ?.filter(filterByBadge) - .filter(filterByType) - .filter(filterFlows) - .filter(filterByFramework) - .filter(filterByUsecases) - .map((data, index) => ( - - {data.badge && ( - - {(data.type === 'Chatflow' || + return ( +
  • + + +
  • + ) + }} + renderInput={(params) => } + sx={{ + width: 300 + }} + limitTags={2} + renderTags={(value, getTagProps) => { + const totalTags = value.length + const limitTags = 2 + + return ( + <> + {value.slice(0, limitTags).map((option, index) => ( + + ))} + + {totalTags > limitTags && ( + + {value.slice(limitTags).map((item, i) => ( +
  • {item}
  • + ))} + + } + placement='top' + > + +{totalTags - limitTags} +
    + )} + + ) + }} + slotProps={{ + paper: { + sx: { + boxShadow: '0 4px 12px rgba(0, 0, 0, 0.2)' + } + } + }} + /> +
    + )} + + + {!view || view === 'card' ? ( + <> + {isLoading ? ( + + + + + + ) : ( + + {getAllTemplatesMarketplacesApi.data + ?.filter(filterByBadge) + .filter(filterByType) + .filter(filterFlows) + .filter(filterByFramework) + .filter(filterByUsecases) + .map((data, index) => ( + + {data.badge && ( + + {(data.type === 'Chatflow' || + data.type === 'Agentflow' || + data.type === 'AgentflowV2') && ( + goToCanvas(data)} + data={data} + images={images[data.id]} + icons={icons[data.id]} + /> + )} + {data.type === 'Tool' && ( + goToTool(data)} /> + )} + + )} + {!data.badge && + (data.type === 'Chatflow' || data.type === 'Agentflow' || data.type === 'AgentflowV2') && ( { icons={icons[data.id]} /> )} - {data.type === 'Tool' && ( - goToTool(data)} /> - )} - - )} - {!data.badge && - (data.type === 'Chatflow' || - data.type === 'Agentflow' || - data.type === 'AgentflowV2') && ( - goToCanvas(data)} - data={data} - images={images[data.id]} - icons={icons[data.id]} - /> + {!data.badge && data.type === 'Tool' && ( + goToTool(data)} /> )} - {!data.badge && data.type === 'Tool' && ( - goToTool(data)} /> - )} - - ))} - - )} - - ) : ( - - )} - - {!isLoading && (!getAllTemplatesMarketplacesApi.data || getAllTemplatesMarketplacesApi.data.length === 0) && ( - - - WorkflowEmptySVG - -
    No Marketplace Yet
    -
    - )} -
    - - - {templateUsecases.map((usecase, index) => ( - { - setSelectedTemplateUsecases( - event.target.checked - ? [...selectedTemplateUsecases, usecase] - : selectedTemplateUsecases.filter((item) => item !== usecase) - ) - }} - /> - } - label={usecase} + + ))} + + )} + + ) : ( + - ))} - - {selectedTemplateUsecases.length > 0 && ( - - )} - {!view || view === 'card' ? ( - <> - {isLoading ? ( - - - - - - ) : ( - - {getAllCustomTemplatesApi.data - ?.filter(filterByBadge) - .filter(filterByType) - .filter(filterFlows) - .filter(filterByFramework) - .filter(filterByUsecases) - .map((data, index) => ( - - {data.badge && ( - - {(data.type === 'Chatflow' || + )} + + {!isLoading && + (!getAllTemplatesMarketplacesApi.data || getAllTemplatesMarketplacesApi.data.length === 0) && ( + + + WorkflowEmptySVG + +
    No Marketplace Yet
    +
    + )} +
    +
    + + + + {templateUsecases.map((usecase, index) => ( + { + setSelectedTemplateUsecases( + event.target.checked + ? [...selectedTemplateUsecases, usecase] + : selectedTemplateUsecases.filter((item) => item !== usecase) + ) + }} + /> + } + label={usecase} + /> + ))} + + {selectedTemplateUsecases.length > 0 && ( + + )} + {!view || view === 'card' ? ( + <> + {isLoading ? ( + + + + + + ) : ( + + {getAllCustomTemplatesApi.data + ?.filter(filterByBadge) + .filter(filterByType) + .filter(filterFlows) + .filter(filterByFramework) + .filter(filterByUsecases) + .map((data, index) => ( + + {data.badge && ( + + {(data.type === 'Chatflow' || + data.type === 'Agentflow' || + data.type === 'AgentflowV2') && ( + goToCanvas(data)} + data={data} + images={templateImages[data.id]} + icons={templateIcons[data.id]} + /> + )} + {data.type === 'Tool' && ( + goToTool(data)} /> + )} + + )} + {!data.badge && + (data.type === 'Chatflow' || data.type === 'Agentflow' || data.type === 'AgentflowV2') && ( { icons={templateIcons[data.id]} /> )} - {data.type === 'Tool' && ( - goToTool(data)} /> - )} - - )} - {!data.badge && - (data.type === 'Chatflow' || - data.type === 'Agentflow' || - data.type === 'AgentflowV2') && ( - goToCanvas(data)} - data={data} - images={templateImages[data.id]} - icons={templateIcons[data.id]} - /> + {!data.badge && data.type === 'Tool' && ( + goToTool(data)} /> )} - {!data.badge && data.type === 'Tool' && ( - goToTool(data)} /> - )} - - ))} + + ))} + + )} + + ) : ( + + )} + {!isLoading && (!getAllCustomTemplatesApi.data || getAllCustomTemplatesApi.data.length === 0) && ( + + + WorkflowEmptySVG - )} - - ) : ( - - )} - {!isLoading && (!getAllCustomTemplatesApi.data || getAllCustomTemplatesApi.data.length === 0) && ( - - - WorkflowEmptySVG - -
    No Saved Custom Templates
    -
    - )} -
    +
    No Saved Custom Templates
    + + )} +
    + )} @@ -855,6 +942,14 @@ const Marketplace = () => { onConfirm={() => setShowToolDialog(false)} onUseTemplate={(tool) => onUseTemplate(tool)} > + {showShareTemplateDialog && ( + setShowShareTemplateDialog(false)} + setError={setError} + > + )} ) diff --git a/packages/ui/src/views/organization/index.jsx b/packages/ui/src/views/organization/index.jsx new file mode 100644 index 000000000..72886d114 --- /dev/null +++ b/packages/ui/src/views/organization/index.jsx @@ -0,0 +1,495 @@ +import { useEffect, useState } from 'react' +import { useNavigate } from 'react-router-dom' +import { z } from 'zod' + +// material-ui +import { Alert, Box, Button, Chip, Divider, Icon, List, ListItemText, Stack, TextField, Typography } from '@mui/material' + +// project imports +import { StyledButton } from '@/ui-component/button/StyledButton' +import { Input } from '@/ui-component/input/Input' +import { BackdropLoader } from '@/ui-component/loading/BackdropLoader' + +// API +import accountApi from '@/api/account.api' +import authApi from '@/api/auth' +import loginMethodApi from '@/api/loginmethod' + +// Hooks +import useApi from '@/hooks/useApi' +import { store } from '@/store' +import { loginSuccess } from '@/store/reducers/authSlice' + +// utils +import useNotifier from '@/utils/useNotifier' +import { passwordSchema } from '@/utils/validation' + +// Icons +import Auth0SSOLoginIcon from '@/assets/images/auth0.svg' +import GoogleSSOLoginIcon from '@/assets/images/google.svg' +import AzureSSOLoginIcon from '@/assets/images/microsoft-azure.svg' +import { useConfig } from '@/store/context/ConfigContext' +import { IconCircleCheck, IconExclamationCircle } from '@tabler/icons-react' + +// ==============================|| Organization & Admin User Setup ||============================== // + +// IMPORTANT: when updating this schema, update the schema on the server as well +// packages/server/src/enterprise/Interface.Enterprise.ts +const OrgSetupSchema = z + .object({ + username: 
z.string().min(1, 'Name is required'), + email: z.string().min(1, 'Email is required').email('Invalid email address'), + password: passwordSchema, + confirmPassword: z.string().min(1, 'Confirm Password is required') + }) + .refine((data) => data.password === data.confirmPassword, { + message: "Passwords don't match", + path: ['confirmPassword'] + }) + +const OrganizationSetupPage = () => { + useNotifier() + const { isEnterpriseLicensed, isOpenSource } = useConfig() + + const orgNameInput = { + label: 'Organization', + name: 'organization', + type: 'text', + placeholder: 'Acme' + } + + const usernameInput = { + label: 'Username', + name: 'username', + type: 'text', + placeholder: 'John Doe' + } + + const passwordInput = { + label: 'Password', + name: 'password', + type: 'password', + placeholder: '********' + } + + const confirmPasswordInput = { + label: 'Confirm Password', + name: 'confirmPassword', + type: 'password', + placeholder: '********' + } + + const emailInput = { + label: 'EMail', + name: 'email', + type: 'email', + placeholder: 'user@company.com' + } + + const [email, setEmail] = useState('') + const [password, setPassword] = useState('') + const [confirmPassword, setConfirmPassword] = useState('') + const [username, setUsername] = useState('') + const [orgName, setOrgName] = useState('') + const [existingUsername, setExistingUsername] = useState('') + const [existingPassword, setExistingPassword] = useState('') + + const [loading, setLoading] = useState(false) + const [authError, setAuthError] = useState('') + const [successMsg, setSuccessMsg] = useState(undefined) + const [requiresAuthentication, setRequiresAuthentication] = useState(false) + + const loginApi = useApi(authApi.login) + const registerAccountApi = useApi(accountApi.registerAccount) + const getBasicAuthApi = useApi(accountApi.getBasicAuth) + const navigate = useNavigate() + + const getDefaultProvidersApi = useApi(loginMethodApi.getLoginMethods) + const [configuredSsoProviders, 
setConfiguredSsoProviders] = useState([]) + + const register = async (event) => { + event.preventDefault() + const result = OrgSetupSchema.safeParse({ + orgName, + username, + email, + password, + confirmPassword + }) + if (result.success) { + setLoading(true) + setAuthError('') + + // Check authentication first if required + if (requiresAuthentication) { + try { + const authResult = await accountApi.checkBasicAuth({ + username: existingUsername, + password: existingPassword + }) + + if (!authResult || !authResult.data || authResult.data.message !== 'Authentication successful') { + setAuthError('Authentication failed. Please check your existing credentials.') + setLoading(false) + return + } + } catch (error) { + setAuthError('Authentication failed. Please check your existing credentials.') + setLoading(false) + return + } + } + + // Proceed with registration after successful authentication + const body = { + user: { + name: username, + email: email, + type: 'pro', + credential: password + } + } + if (isEnterpriseLicensed) { + body.organization = { + name: orgName + } + } + await registerAccountApi.request(body) + } else { + // Handle validation errors + const errorMessages = result.error.errors.map((error) => error.message) + setAuthError(errorMessages.join(', ')) + } + } + + useEffect(() => { + if (registerAccountApi.error) { + const errMessage = + typeof registerAccountApi.error.response.data === 'object' + ? registerAccountApi.error.response.data.message + : registerAccountApi.error.response.data + let finalErrMessage = '' + if (isEnterpriseLicensed) { + finalErrMessage = `Error in registering organization. Please contact your administrator. 
(${errMessage})` + } else { + finalErrMessage = `Error in registering account: ${errMessage}` + } + setAuthError(finalErrMessage) + setLoading(false) + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [registerAccountApi.error]) + + useEffect(() => { + if (getBasicAuthApi.data && getBasicAuthApi.data.isUsernamePasswordSet === true) { + setRequiresAuthentication(true) + } + }, [getBasicAuthApi.data]) + + useEffect(() => { + if (!isOpenSource) { + getDefaultProvidersApi.request() + } else { + getBasicAuthApi.request() + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []) + + useEffect(() => { + if (getDefaultProvidersApi.data && getDefaultProvidersApi.data.providers) { + setConfiguredSsoProviders(getDefaultProvidersApi.data.providers.map((provider) => provider)) + } + }, [getDefaultProvidersApi.data]) + + useEffect(() => { + if (registerAccountApi.data) { + setAuthError(undefined) + setConfirmPassword('') + setPassword('') + setUsername('') + setEmail('') + setSuccessMsg(registerAccountApi.data.message) + setTimeout(() => { + const body = { + email, + password + } + loginApi.request(body) + }, 1000) + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [registerAccountApi.data]) + + useEffect(() => { + if (loginApi.data) { + setLoading(false) + store.dispatch(loginSuccess(loginApi.data)) + localStorage.setItem('username', loginApi.data.name) + navigate(location.state?.path || '/') + //navigate(0) + } + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [loginApi.data]) + + const signInWithSSO = (ssoProvider) => { + window.location.href = `/api/v1/${ssoProvider}/login` + } + + return ( + <> + + + {authError && ( + } variant='filled' severity='error'> + {authError.split(', ').length > 0 ? 
( + + {authError.split(', ').map((error, index) => ( + + ))} + + ) : ( + authError + )} + + )} + {successMsg && ( + } variant='filled' severity='success'> + {successMsg} + + )} + + Setup Account + + {requiresAuthentication && ( + + Application authentication now requires email and password. Contact administrator to setup an account. + + )} + {(isOpenSource || isEnterpriseLicensed) && ( + + Account setup does not make any external connections, your data stays securely on your locally hosted server. + + )} +
    + + {requiresAuthentication && ( + <> + +
    + + Existing Username * + +
    +
    + setExistingUsername(e.target.value)} + /> + + Existing username that was set as FLOWISE_USERNAME environment variable + +
    + +
    + + Existing Password * + +
    +
    + setExistingPassword(e.target.value)} + /> + + Existing password that was set as FLOWISE_PASSWORD environment variable + +
    + + + + + )} + {isEnterpriseLicensed && ( + <> + +
    + + Organization Name: * + +
    +
    + setOrgName(newValue)} + value={orgName} + showDialog={false} + /> +
    + + + + + + + )} + +
    + + Administrator Name * + +
    +
    + setUsername(newValue)} + value={username} + showDialog={false} + /> + + Is used for display purposes only. + +
    + +
    + + Administrator Email * + +
    +
    + setEmail(newValue)} + type='email' + value={email} + showDialog={false} + /> + + Kindly use a valid email address. Will be used as login id. + +
    + +
    + + Password * + +
    +
    + setPassword(newValue)} value={password} /> + + + Password must be at least 8 characters long and contain at least one lowercase letter, one uppercase + letter, one digit, and one special character. + + +
    + +
    + + Confirm Password * + +
    +
    + setConfirmPassword(newValue)} + value={confirmPassword} + /> + + Reconfirm your password. Must match the password typed above. + +
    + + Sign Up + + {configuredSsoProviders && configuredSsoProviders.length > 0 && OR} + {configuredSsoProviders && + configuredSsoProviders.map( + (ssoProvider) => + //https://learn.microsoft.com/en-us/entra/identity-platform/howto-add-branding-in-apps + ssoProvider === 'azure' && ( + + ) + )} + {configuredSsoProviders && + configuredSsoProviders.map( + (ssoProvider) => + ssoProvider === 'google' && ( + + ) + )} + {configuredSsoProviders && + configuredSsoProviders.map( + (ssoProvider) => + ssoProvider === 'auth0' && ( + + ) + )} +
    +
    +
    +
    + {loading && } + + ) +} + +export default OrganizationSetupPage diff --git a/packages/ui/src/views/roles/CreateEditRoleDialog.css b/packages/ui/src/views/roles/CreateEditRoleDialog.css new file mode 100644 index 000000000..7963033b8 --- /dev/null +++ b/packages/ui/src/views/roles/CreateEditRoleDialog.css @@ -0,0 +1,106 @@ +.role-editor { + padding: 20px 0px; + border-radius: 10px; + width: 100%; + font-family: Arial, sans-serif; + display: flex; + flex-direction: column; + gap: 20px; + height: 75vh; +} + +.role-name { + position: sticky; + top: 0; + z-index: 1; +} + +.role-description { + margin-bottom: 20px; + position: sticky; + top: 0; + padding: 10px 0; + z-index: 1; +} + +.permissions-container > p, +.role-name label { + display: block; + font-weight: bold; + margin: 0; + margin-bottom: 5px; +} + +.role-name input { + width: 100%; + padding: 10px; + border: 1px solid #ccc; + border-radius: 5px; + font-size: 14px; + display: block; +} + +.permissions-container { + overflow-y: hidden; + max-height: calc(100vh - 120px); /* Adjust based on header and input height */ +} + +.permissions-list-wrapper { + overflow-y: auto; + max-height: 100%; + padding-right: 10px; + padding-bottom: 10px; +} + +.permission-category { + margin-bottom: 20px; + border: 1px solid #e0e0e0; + border-radius: 8px; + padding: 15px; +} + +.category-header { + display: flex; + justify-content: space-between; + align-items: center; + border-bottom: 1px solid #e0e0e0; + padding-bottom: 10px; + margin-bottom: 10px; +} + +.category-header h3 { + margin: 0; + font-size: 16px; +} + +.category-header button { + background-color: #007bff; + color: white; + border: none; + border-radius: 5px; + padding: 5px 10px; + cursor: pointer; + font-size: 14px; +} + +.permissions-list { + display: flex; + flex-wrap: wrap; + margin-top: 10px; +} + +.permission-item { + width: 50%; + box-sizing: border-box; +} + +.permission-item label { + font-size: 14px; + display: flex; + align-items: center; + padding: 5px 
0; +} + +.permission-item input { + margin-right: 10px; +} diff --git a/packages/ui/src/views/roles/CreateEditRoleDialog.jsx b/packages/ui/src/views/roles/CreateEditRoleDialog.jsx new file mode 100644 index 000000000..9592966c7 --- /dev/null +++ b/packages/ui/src/views/roles/CreateEditRoleDialog.jsx @@ -0,0 +1,421 @@ +import { createPortal } from 'react-dom' +import PropTypes from 'prop-types' +import { useState, useEffect } from 'react' +import { useDispatch, useSelector } from 'react-redux' +import { enqueueSnackbar as enqueueSnackbarAction, closeSnackbar as closeSnackbarAction } from '@/store/actions' + +// Material +import { Box, Typography, OutlinedInput, Button, Dialog, DialogActions, DialogContent, DialogTitle } from '@mui/material' + +// Project imports +import { StyledButton } from '@/ui-component/button/StyledButton' +import ConfirmDialog from '@/ui-component/dialog/ConfirmDialog' + +// Icons +import { IconX, IconUser } from '@tabler/icons-react' + +// API +import authApi from '@/api/auth' +import roleApi from '@/api/role' + +// Hooks +import useApi from '@/hooks/useApi' +import { useConfig } from '@/store/context/ConfigContext' + +// utils +import useNotifier from '@/utils/useNotifier' + +// const +import { HIDE_CANVAS_DIALOG, SHOW_CANVAS_DIALOG } from '@/store/actions' + +import './CreateEditRoleDialog.css' + +const CreateEditRoleDialog = ({ show, dialogProps, onCancel, onConfirm, setError }) => { + const portalElement = document.getElementById('portal') + + const dispatch = useDispatch() + const { isEnterpriseLicensed } = useConfig() + + // ==============================|| Snackbar ||============================== // + + useNotifier() + + const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) + const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) + + const [roleName, setRoleName] = useState('') + const [roleDescription, setRoleDescription] = useState('') + const [selectedPermissions, setSelectedPermissions] = 
useState({}) + const [permissions, setPermissions] = useState({}) + const [dialogData, setDialogData] = useState({}) + + const getAllPermissionsApi = useApi(authApi.getAllPermissions) + const currentUser = useSelector((state) => state.auth.user) + + const handleRoleNameChange = (event) => { + setRoleName(event.target.value) + } + const handleRoleDescChange = (event) => { + setRoleDescription(event.target.value) + } + + const handlePermissionChange = (category, key) => { + setSelectedPermissions((prevPermissions) => { + const updatedCategoryPermissions = { + ...prevPermissions[category], + [key]: !prevPermissions[category]?.[key] + } + + if (category === 'templates') { + if (key !== 'templates:marketplace' && key !== 'templates:custom') { + updatedCategoryPermissions['templates:marketplace'] = true + updatedCategoryPermissions['templates:custom'] = true + } + } else { + const viewPermissionKey = `${category}:view` + if (key !== viewPermissionKey) { + const hasEnabledPermissions = Object.entries(updatedCategoryPermissions).some( + ([permissionKey, isEnabled]) => permissionKey !== viewPermissionKey && isEnabled + ) + if (hasEnabledPermissions) { + updatedCategoryPermissions[viewPermissionKey] = true + } + } else { + const hasEnabledPermissions = Object.entries(updatedCategoryPermissions).some( + ([permissionKey, isEnabled]) => permissionKey === viewPermissionKey && isEnabled + ) + if (hasEnabledPermissions) { + updatedCategoryPermissions[key] = true + } + } + } + + return { + ...prevPermissions, + [category]: updatedCategoryPermissions + } + }) + } + + const isCheckboxDisabled = (permissions, category, key) => { + if (category === 'templates') { + // For templates, disable marketplace and custom view if any other permission is enabled + if (key === 'templates:marketplace' || key === 'templates:custom') { + return Object.entries(permissions[category] || {}).some( + ([permKey, isEnabled]) => permKey !== 'templates:marketplace' && permKey !== 'templates:custom' && isEnabled + 
) + } + } else { + const viewPermissionKey = `${category}:view` + if (key === viewPermissionKey) { + // Disable the view permission if any other permission is enabled + return Object.entries(permissions[category] || {}).some( + ([permKey, isEnabled]) => permKey !== viewPermissionKey && isEnabled + ) + } + } + + // Non-view permissions are never disabled + return false + } + + const handleSelectAll = (category) => { + const allSelected = permissions[category].every((permission) => selectedPermissions[category]?.[permission.key]) + setSelectedPermissions((prevPermissions) => ({ + ...prevPermissions, + [category]: Object.fromEntries(permissions[category].map((permission) => [permission.key, !allSelected])) + })) + } + + useEffect(() => { + if ((dialogProps.type === 'EDIT' || dialogProps.type === 'VIEW') && dialogProps.data) { + setDialogData(dialogProps.data) + } + getAllPermissionsApi.request() + return () => { + setRoleName('') + setRoleDescription('') + setSelectedPermissions({}) + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [dialogProps]) + + useEffect(() => { + if (getAllPermissionsApi.error) { + setError(getAllPermissionsApi.error) + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getAllPermissionsApi.error]) + + useEffect(() => { + if (show) dispatch({ type: SHOW_CANVAS_DIALOG }) + else dispatch({ type: HIDE_CANVAS_DIALOG }) + return () => dispatch({ type: HIDE_CANVAS_DIALOG }) + }, [show, dispatch]) + + useEffect(() => { + if (getAllPermissionsApi.data) { + setRoleName(dialogData.name) + setRoleDescription(dialogData.description) + const permissions = getAllPermissionsApi.data + // Filter out enterprise permissions if not licensed + if (!isEnterpriseLicensed) { + Object.keys(permissions).forEach((category) => { + permissions[category] = permissions[category].filter((permission) => !permission.isEnterprise) + }) + // Remove categories that have no permissions left + Object.keys(permissions).forEach((category) => { + if 
(permissions[category].length === 0) { + delete permissions[category] + } + }) + } + setPermissions(permissions) + if ((dialogProps.type === 'EDIT' || dialogProps.type === 'VIEW') && dialogProps.data) { + const dialogDataPermissions = JSON.parse(dialogData.permissions) + if (dialogDataPermissions && dialogDataPermissions.length > 0) { + Object.keys(permissions).forEach((category) => { + Object.keys(permissions[category]).forEach((key) => { + dialogDataPermissions.forEach((perm) => { + if (perm === permissions[category][key].key) { + if (!selectedPermissions[category]) { + selectedPermissions[category] = {} + } + selectedPermissions[category][perm] = true + } + }) + }) + }) + setSelectedPermissions(selectedPermissions) + } + } + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getAllPermissionsApi.data]) + + const createRole = async () => { + try { + // if roleName has a space, raise an error + if (roleName.indexOf(' ') > -1) { + enqueueSnackbar({ + message: `Role Name cannot contain spaces.`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + return + } + const saveObj = { + name: roleName, + description: roleDescription, + createdBy: currentUser.id, + organizationId: currentUser.activeOrganizationId + } + const tempPermissions = Object.keys(selectedPermissions) + .map((category) => { + return Object.keys(selectedPermissions[category]).map((key) => { + if (selectedPermissions[category][key]) { + return key + } + }) + }) + .flat() + saveObj.permissions = JSON.stringify(tempPermissions) + let saveResp + if (dialogProps.type === 'EDIT') { + saveObj.id = dialogProps.data.id + saveObj.updatedBy = currentUser.id + saveResp = await roleApi.updateRole(saveObj) + } else { + saveResp = await roleApi.createRole(saveObj) + } + if (saveResp.data) { + enqueueSnackbar({ + message: dialogProps.type === 'EDIT' ? 
'Role Updated Successfully' : 'New Role Created!', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + onConfirm(saveResp.data.id) + } + } catch (error) { + enqueueSnackbar({ + message: `Failed : ${typeof error.response.data === 'object' ? error.response.data.message : error.response.data}`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + onCancel() + } + } + + const checkDisabled = () => { + if (dialogProps.type === 'VIEW') { + return true + } + if (!roleName || roleName === '') { + return true + } + if (!Object.keys(selectedPermissions).length || !ifPermissionContainsTrue(selectedPermissions)) { + return true + } + return false + } + + const ifPermissionContainsTrue = (obj) => { + for (const key in obj) { + if (typeof obj[key] === 'object' && obj[key] !== null) { + // Recursively check nested objects + if (ifPermissionContainsTrue(obj[key])) { + return true + } + } else if (obj[key] === true) { + return true + } + } + return false + } + + const component = show ? ( + + +
    + + {dialogProps.type === 'EDIT' ? 'Edit Role' : dialogProps.type === 'VIEW' ? 'View Role' : 'Create New Role'} +
    +
    + +
    + + + *  Role Name + + + + + + Role Description + + + +
    +

    Permissions

    +
    + {permissions && + Object.keys(permissions).map((category) => ( +
    +
    +

    + {category + .replace(/([A-Z])/g, ' $1') + .trim() + .toUpperCase()} +

    + +
    +
    + {permissions[category].map((permission, index) => ( +
    + +
    + ))} +
    +
    + ))} +
    +
    +
    +
    + + + {dialogProps.type !== 'VIEW' && ( + + {dialogProps.type !== 'EDIT' ? 'Create Role' : 'Update Role'} + + )} + + +
    + ) : null + + return createPortal(component, portalElement) +} + +CreateEditRoleDialog.propTypes = { + show: PropTypes.bool, + dialogProps: PropTypes.object, + onCancel: PropTypes.func, + onConfirm: PropTypes.func, + setError: PropTypes.func +} + +export default CreateEditRoleDialog diff --git a/packages/ui/src/views/roles/index.jsx b/packages/ui/src/views/roles/index.jsx new file mode 100644 index 000000000..d090c4acb --- /dev/null +++ b/packages/ui/src/views/roles/index.jsx @@ -0,0 +1,662 @@ +import React from 'react' +import { useEffect, useState } from 'react' +import { useDispatch, useSelector } from 'react-redux' +import { enqueueSnackbar as enqueueSnackbarAction, closeSnackbar as closeSnackbarAction } from '@/store/actions' +import * as PropTypes from 'prop-types' + +// material-ui +import { styled } from '@mui/material/styles' +import { tableCellClasses } from '@mui/material/TableCell' +import { + Box, + Skeleton, + Stack, + Table, + TableBody, + TableCell, + TableContainer, + TableHead, + TableRow, + Paper, + useTheme, + Typography, + Button, + Drawer, + TableSortLabel +} from '@mui/material' + +// project imports +import MainCard from '@/ui-component/cards/MainCard' +import { PermissionIconButton, StyledPermissionButton } from '@/ui-component/button/RBACButtons' +import ConfirmDialog from '@/ui-component/dialog/ConfirmDialog' +import ViewHeader from '@/layout/MainLayout/ViewHeader' +import ErrorBoundary from '@/ErrorBoundary' +import CreateEditRoleDialog from '@/views/roles/CreateEditRoleDialog' + +// API +import authApi from '@/api/auth' +import roleApi from '@/api/role' +import userApi from '@/api/user' + +// Hooks +import useApi from '@/hooks/useApi' +import useConfirm from '@/hooks/useConfirm' + +// utils +import useNotifier from '@/utils/useNotifier' + +// Icons +import { IconEdit, IconPlus, IconEye, IconEyeOff, IconX, IconTrash } from '@tabler/icons-react' +import roles_emptySVG from '@/assets/images/roles_empty.svg' + +import { useError } from 
'@/store/context/ErrorContext' + +const StyledTableCell = styled(TableCell)(({ theme }) => ({ + borderColor: theme.palette.grey[900] + 25, + + [`&.${tableCellClasses.head}`]: { + color: theme.palette.grey[900] + }, + [`&.${tableCellClasses.body}`]: { + fontSize: 14, + height: 48 + } +})) + +const StyledTableRow = styled(TableRow)(() => ({ + // hide last border + '&:last-child td, &:last-child th': { + border: 0 + } +})) + +function ViewPermissionsDrawer(props) { + const theme = useTheme() + const [permissions, setPermissions] = useState({}) + const [selectedPermissions, setSelectedPermissions] = useState({}) + + const { setError } = useError() + + const getAllPermissionsApi = useApi(authApi.getAllPermissions) + + useEffect(() => { + if (props.open) { + getAllPermissionsApi.request() + } + return () => { + setSelectedPermissions({}) + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [props.open]) + + useEffect(() => { + if (getAllPermissionsApi.error) { + setError(getAllPermissionsApi.error) + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getAllPermissionsApi.error]) + + useEffect(() => { + if (getAllPermissionsApi.data) { + const permissions = getAllPermissionsApi.data + setPermissions(permissions) + const rolePermissions = JSON.parse(props.role.permissions) + if (rolePermissions && rolePermissions.length > 0) { + Object.keys(permissions).forEach((category) => { + Object.keys(permissions[category]).forEach((key) => { + rolePermissions.forEach((perm) => { + if (perm === permissions[category][key].key) { + if (!selectedPermissions[category]) { + selectedPermissions[category] = {} + } + selectedPermissions[category][perm] = true + } + }) + }) + }) + setSelectedPermissions(selectedPermissions) + } + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getAllPermissionsApi.data]) + + return ( + props.setOpen(false)} sx={{ minWidth: 320 }}> + + + {props.role.name} + + {props.role.description && ( + + 
{props.role.description} + + )} + + + Permissions + + + {permissions && + Object.keys(permissions).map((category) => ( + + + + {category + .replace(/([A-Z])/g, ' $1') + .trim() + .toUpperCase()} + + + + {permissions[category].map((permission, index) => ( +
    + +
    + ))} +
    +
    + ))} +
    +
    +
    +
    + ) +} +ViewPermissionsDrawer.propTypes = { + open: PropTypes.bool, + setOpen: PropTypes.func, + role: PropTypes.any +} + +function ShowRoleRow(props) { + const [openAssignedUsersDrawer, setOpenAssignedUsersDrawer] = useState(false) + const [openViewPermissionsDrawer, setOpenViewPermissionsDrawer] = useState(false) + const [selectedRoleId, setSelectedRoleId] = useState('') + const [assignedUsers, setAssignedUsers] = useState([]) + const [order, setOrder] = useState('asc') + const [orderBy, setOrderBy] = useState('workspace') + + const theme = useTheme() + const customization = useSelector((state) => state.customization) + + const getAllUsersByRoleIdApi = useApi(userApi.getUserByRoleId) + + const handleViewAssignedUsers = (roleId) => { + setOpenAssignedUsersDrawer(true) + setSelectedRoleId(roleId) + } + + const handleRequestSort = (property) => { + const isAsc = orderBy === property && order === 'asc' + setOrder(isAsc ? 'desc' : 'asc') + setOrderBy(property) + } + + const sortedAssignedUsers = [...assignedUsers].sort((a, b) => { + let comparison = 0 + + if (orderBy === 'workspace') { + const workspaceA = (a.workspace?.name || '').toLowerCase() + const workspaceB = (b.workspace?.name || '').toLowerCase() + comparison = workspaceA.localeCompare(workspaceB) + if (comparison === 0) { + const userA = (a.user?.name || a.user?.email || '').toLowerCase() + const userB = (b.user?.name || b.user?.email || '').toLowerCase() + comparison = userA.localeCompare(userB) + } + } else if (orderBy === 'user') { + const userA = (a.user?.name || a.user?.email || '').toLowerCase() + const userB = (b.user?.name || b.user?.email || '').toLowerCase() + comparison = userA.localeCompare(userB) + if (comparison === 0) { + const workspaceA = (a.workspace?.name || '').toLowerCase() + const workspaceB = (b.workspace?.name || '').toLowerCase() + comparison = workspaceA.localeCompare(workspaceB) + } + } + + return order === 'asc' ? 
comparison : -comparison + }) + + useEffect(() => { + if (getAllUsersByRoleIdApi.data) { + setAssignedUsers(getAllUsersByRoleIdApi.data) + } + }, [getAllUsersByRoleIdApi.data]) + + useEffect(() => { + if (openAssignedUsersDrawer && selectedRoleId) { + getAllUsersByRoleIdApi.request(selectedRoleId) + } else { + setOpenAssignedUsersDrawer(false) + setSelectedRoleId('') + setAssignedUsers([]) + setOrder('asc') + setOrderBy('workspace') + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [openAssignedUsersDrawer]) + + return ( + <> + + {props.role.name} + {props.role.description} + + + + {JSON.parse(props.role.permissions).map((d, key) => ( + + {d} + {', '} + + ))} + + setOpenViewPermissionsDrawer(!openViewPermissionsDrawer)} + > + + + + + + {props.role.userCount} + {props.role.userCount > 0 && ( + handleViewAssignedUsers(props.role.id)} + > + {props.role.userCount > 0 && openAssignedUsersDrawer ? : } + + )} + + + props.onEditClick(props.role)} + > + + + 0} + color='error' + title={props.role.userCount > 0 ? 'Remove users with the role from Workspace first' : 'Delete'} + onClick={() => props.onDeleteClick(props.role)} + > + + + + + setOpenAssignedUsersDrawer(false)} sx={{ minWidth: 320 }}> + + + Assigned Users + + + + + + + handleRequestSort('user')} + > + User + + + + handleRequestSort('workspace')} + > + Workspace + + + + + + {sortedAssignedUsers.map((item, index) => ( + + {item.user.name || item.user.email} + {item.workspace.name} + + ))} + +
    +
    +
    +
    + + + ) +} + +ShowRoleRow.propTypes = { + key: PropTypes.any, + role: PropTypes.any, + onViewClick: PropTypes.func, + onEditClick: PropTypes.func, + onDeleteClick: PropTypes.func, + open: PropTypes.bool, + theme: PropTypes.any +} + +// ==============================|| Roles ||============================== // + +const Roles = () => { + const theme = useTheme() + const customization = useSelector((state) => state.customization) + const dispatch = useDispatch() + useNotifier() + const { error, setError } = useError() + + const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) + const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) + + const [isLoading, setLoading] = useState(true) + + const [showCreateEditDialog, setShowCreateEditDialog] = useState(false) + const [dialogProps, setDialogProps] = useState({}) + + const { confirm } = useConfirm() + const currentUser = useSelector((state) => state.auth.user) + + const getAllRolesByOrganizationIdApi = useApi(roleApi.getAllRolesByOrganizationId) + + const [roles, setRoles] = useState([]) + const [search, setSearch] = useState('') + + const onSearchChange = (event) => { + setSearch(event.target.value) + } + + function filterUsers(data) { + return ( + (data.name && data.name.toLowerCase().indexOf(search.toLowerCase()) > -1) || + (data.description && data.description.toLowerCase().indexOf(search.toLowerCase()) > -1) + ) + } + + const addNew = () => { + const dialogProp = { + type: 'ADD', + cancelButtonName: 'Cancel', + confirmButtonName: 'Invite', + data: {} + } + setDialogProps(dialogProp) + setShowCreateEditDialog(true) + } + + const edit = (role) => { + const dialogProp = { + type: 'EDIT', + cancelButtonName: 'Cancel', + confirmButtonName: 'Invite', + data: { + ...role + } + } + setDialogProps(dialogProp) + setShowCreateEditDialog(true) + } + + const view = (role) => { + const dialogProp = { + type: 'VIEW', + cancelButtonName: 'Cancel', + confirmButtonName: 'Invite', + data: { 
+ ...role + } + } + setDialogProps(dialogProp) + setShowCreateEditDialog(true) + } + + const deleteRole = async (role) => { + const confirmPayload = { + title: `Delete`, + description: `Delete Role ${role.name}?`, + confirmButtonName: 'Delete', + cancelButtonName: 'Cancel' + } + const isConfirmed = await confirm(confirmPayload) + + if (isConfirmed) { + try { + const deleteResp = await roleApi.deleteRole(role.id, currentUser.activeOrganizationId) + if (deleteResp.data) { + enqueueSnackbar({ + message: 'Role deleted', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + onConfirm() + } + } catch (error) { + enqueueSnackbar({ + message: `Failed to delete Role: ${ + typeof error.response.data === 'object' ? error.response.data.message : error.response.data + }`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + } + } + } + + const onConfirm = () => { + setShowCreateEditDialog(false) + getAllRolesByOrganizationIdApi.request(currentUser.activeOrganizationId) + } + + useEffect(() => { + getAllRolesByOrganizationIdApi.request(currentUser.activeOrganizationId) + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []) + + useEffect(() => { + setLoading(getAllRolesByOrganizationIdApi.loading) + }, [getAllRolesByOrganizationIdApi.loading]) + + useEffect(() => { + if (getAllRolesByOrganizationIdApi.error) { + setError(getAllRolesByOrganizationIdApi.error) + } + }, [getAllRolesByOrganizationIdApi.error, setError]) + + useEffect(() => { + if (getAllRolesByOrganizationIdApi.data) { + setRoles(getAllRolesByOrganizationIdApi.data) + } + }, [getAllRolesByOrganizationIdApi.data]) + + return ( + <> + + {error ? ( + + ) : ( + + + } + id='btn_createUser' + > + Add Role + + + {!isLoading && roles.length === 0 ? ( + + + roles_emptySVG + +
    No Roles Yet
    +
    + ) : ( + <> + + + + + + + Name + Description + Permissions + Assigned Users + + + + + {isLoading ? ( + <> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ) : ( + <> + {roles.filter(filterUsers).map((role, index) => ( + + ))} + + )} + +
    +
    +
    +
    + + )} +
    + )} +
    + {showCreateEditDialog && ( + setShowCreateEditDialog(false)} + onConfirm={onConfirm} + setError={setError} + > + )} + + + ) +} + +export default Roles diff --git a/packages/ui/src/views/serverlogs/index.jsx b/packages/ui/src/views/serverlogs/index.jsx new file mode 100644 index 000000000..a0b935a12 --- /dev/null +++ b/packages/ui/src/views/serverlogs/index.jsx @@ -0,0 +1,315 @@ +import { useState, useEffect, forwardRef } from 'react' +import PropTypes from 'prop-types' + +// project imports +import MainCard from '@/ui-component/cards/MainCard' +import DatePicker from 'react-datepicker' +import { gridSpacing } from '@/store/constant' +import CodeMirror from '@uiw/react-codemirror' +import { EditorView } from '@codemirror/view' +import { markdown } from '@codemirror/lang-markdown' +import { sublime } from '@uiw/codemirror-theme-sublime' + +// material-ui +import { Box, Skeleton, Stack, Select, MenuItem, ListItemButton } from '@mui/material' +import { useTheme } from '@mui/material/styles' + +// ui +import ViewHeader from '@/layout/MainLayout/ViewHeader' +import ErrorBoundary from '@/ErrorBoundary' + +import useApi from '@/hooks/useApi' +import logsApi from '@/api/log' +import { useError } from '@/store/context/ErrorContext' + +import LogsEmptySVG from '@/assets/images/logs_empty.svg' +import 'react-datepicker/dist/react-datepicker.css' + +const DatePickerCustomInput = forwardRef(function DatePickerCustomInput({ value, onClick }, ref) { + return ( + + {value} + + ) +}) + +DatePickerCustomInput.propTypes = { + value: PropTypes.string, + onClick: PropTypes.func +} + +const searchTimeRanges = [ + 'Last hour', + 'Last 4 hours', + 'Last 24 hours', + 'Last 2 days', + 'Last 7 days', + 'Last 14 days', + 'Last 1 month', + 'Last 2 months', + 'Last 3 months', + 'Custom' +] + +const getDateBefore = (unit, value) => { + const now = new Date() + if (unit === 'hours') now.setHours(now.getHours() - value) + if (unit === 'days') now.setDate(now.getDate() - value) + if (unit === 
'months') now.setMonth(now.getMonth() - value) + return now +} + +const getDateTimeFormatted = (date) => { + const now = date ? date : new Date() + const year = now.getFullYear() + const month = (now.getMonth() + 1).toString().padStart(2, '0') // +1 because getMonth() returns 0 for January, 1 for February, etc. + const day = now.getDate().toString().padStart(2, '0') + const hour = now.getHours().toString().padStart(2, '0') + + return `${year}-${month}-${day}-${hour}` +} + +const subtractTime = (months, days, hours) => { + let checkDate = new Date() + + if (months > 0) { + checkDate.setMonth(checkDate.getMonth() - months) + } else { + checkDate.setMonth(checkDate.getMonth()) + } + + if (days > 0) { + checkDate.setDate(checkDate.getDate() - days) + } else { + checkDate.setDate(checkDate.getDate()) + } + + if (hours > 0) { + checkDate.setHours(checkDate.getHours() - hours) + } else { + checkDate.setHours(checkDate.getHours()) + } + + const year = checkDate.getFullYear() + const month = (checkDate.getMonth() + 1).toString().padStart(2, '0') + const day = checkDate.getDate().toString().padStart(2, '0') + const hour = checkDate.getHours().toString().padStart(2, '0') + + return `${year}-${month}-${day}-${hour}` +} + +const Logs = () => { + const colorTheme = useTheme() + + const customStyle = EditorView.baseTheme({ + '&': { + color: '#191b1f', + padding: '10px', + borderRadius: '15px' + }, + '.cm-placeholder': { + color: 'rgba(120, 120, 120, 0.5)' + }, + '.cm-content': { + fontFamily: 'Roboto, sans-serif', + fontSize: '0.95rem', + letterSpacing: '0em', + fontWeight: 400, + lineHeight: '1.5em', + color: colorTheme.darkTextPrimary + } + }) + + const getLogsApi = useApi(logsApi.getLogs) + const { error } = useError() + + const [isLoading, setLoading] = useState(true) + const [logData, setLogData] = useState('') + const [selectedTimeSearch, setSelectedTimeSearch] = useState('Last hour') + const [startDate, setStartDate] = useState(getDateBefore('hours', 1)) + const [endDate, 
setEndDate] = useState(new Date()) + + const handleTimeSelectionChange = (event) => { + setSelectedTimeSearch(event.target.value) + switch (event.target.value) { + case 'Last hour': + getLogsApi.request(subtractTime(0, 0, 1), getDateTimeFormatted()) + break + case 'Last 4 hours': + getLogsApi.request(subtractTime(0, 0, 4), getDateTimeFormatted()) + break + case 'Last 24 hours': + getLogsApi.request(subtractTime(0, 0, 24), getDateTimeFormatted()) + break + case 'Last 2 days': + getLogsApi.request(subtractTime(0, 2, 0), getDateTimeFormatted()) + break + case 'Last 7 days': + getLogsApi.request(subtractTime(0, 7, 0), getDateTimeFormatted()) + break + case 'Last 14 days': + getLogsApi.request(subtractTime(0, 14, 0), getDateTimeFormatted()) + break + case 'Last 1 month': + getLogsApi.request(subtractTime(1, 0, 0), getDateTimeFormatted()) + break + case 'Last 2 months': + getLogsApi.request(subtractTime(2, 0, 0), getDateTimeFormatted()) + break + case 'Last 3 months': + getLogsApi.request(subtractTime(3, 0, 0), getDateTimeFormatted()) + break + case 'Custom': + setStartDate(getDateBefore('hours', 1)) + setEndDate(new Date()) + getLogsApi.request(subtractTime(0, 0, 1), getDateTimeFormatted()) + break + default: + break + } + } + + const onStartDateSelected = (date) => { + setStartDate(date) + getLogsApi.request(getDateTimeFormatted(date), getDateTimeFormatted(endDate)) + } + + const onEndDateSelected = (date) => { + setEndDate(date) + getLogsApi.request(getDateTimeFormatted(startDate), getDateTimeFormatted(date)) + } + + useEffect(() => { + const currentTimeFormatted = getDateTimeFormatted() + const startTimeFormatted = subtractTime(0, 0, 1) + getLogsApi.request(startTimeFormatted, currentTimeFormatted) + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []) + + useEffect(() => { + setLoading(getLogsApi.loading) + }, [getLogsApi.loading]) + + useEffect(() => { + if (getLogsApi.data && getLogsApi.data.length > 0) { + let totalLogs = '' + for (const logData of 
getLogsApi.data) { + totalLogs += logData + '\n' + } + setLogData(totalLogs) + } + }, [getLogsApi.data]) + + return ( + + {error ? ( + + ) : ( + + + {isLoading ? ( + + + + + + + + + + + + + + + + ) : ( + <> + + + {selectedTimeSearch === 'Custom' && ( + <> + + From + onStartDateSelected(date)} + selectsStart + startDate={startDate} + endDate={endDate} + maxDate={endDate} + showTimeSelect + timeFormat='HH:mm' + timeIntervals={60} + dateFormat='yyyy MMMM d, h aa' + customInput={} + /> + + + To + onEndDateSelected(date)} + selectsEnd + showTimeSelect + timeFormat='HH:mm' + timeIntervals={60} + startDate={startDate} + endDate={endDate} + minDate={startDate} + maxDate={new Date()} + dateFormat='yyyy MMMM d, h aa' + customInput={} + /> + + + )} + + {logData ? ( + + ) : ( + + + LogsEmptySVG + +
    No Logs Yet
    +
    + )} + + )} +
    + )} +
    + ) +} + +export default Logs diff --git a/packages/ui/src/views/settings/index.jsx b/packages/ui/src/views/settings/index.jsx index 4ef37c74a..9aafd1a73 100644 --- a/packages/ui/src/views/settings/index.jsx +++ b/packages/ui/src/views/settings/index.jsx @@ -16,6 +16,7 @@ import Transitions from '@/ui-component/extended/Transitions' import settings from '@/menu-items/settings' import agentsettings from '@/menu-items/agentsettings' import customAssistantSettings from '@/menu-items/customassistant' +import { useAuth } from '@/hooks/useAuth' // ==============================|| SETTINGS ||============================== // @@ -25,6 +26,7 @@ const Settings = ({ chatflow, isSettingsOpen, isCustomAssistant, anchorEl, isAge const customization = useSelector((state) => state.customization) const inputFile = useRef(null) const [open, setOpen] = useState(false) + const { hasPermission } = useAuth() const handleFileUpload = (e) => { if (!e.target.files) return @@ -64,6 +66,9 @@ const Settings = ({ chatflow, isSettingsOpen, isCustomAssistant, anchorEl, isAge // settings list items const items = settingsMenu.map((menu) => { + if (menu.permission && !hasPermission(menu.permission)) { + return null + } const Icon = menu.icon const itemIcon = menu?.icon ? 
( diff --git a/packages/ui/src/views/tools/ToolDialog.jsx b/packages/ui/src/views/tools/ToolDialog.jsx index de048b639..b73b15ec1 100644 --- a/packages/ui/src/views/tools/ToolDialog.jsx +++ b/packages/ui/src/views/tools/ToolDialog.jsx @@ -14,6 +14,9 @@ import DeleteIcon from '@mui/icons-material/Delete' import ConfirmDialog from '@/ui-component/dialog/ConfirmDialog' import { CodeEditor } from '@/ui-component/editor/CodeEditor' import HowToUseFunctionDialog from './HowToUseFunctionDialog' +import { PermissionButton, StyledPermissionButton } from '@/ui-component/button/RBACButtons' +import { Available } from '@/ui-component/rbac/available' +import ExportAsTemplateDialog from '@/ui-component/dialog/ExportAsTemplateDialog' import PasteJSONDialog from './PasteJSONDialog' // Icons @@ -30,7 +33,6 @@ import useApi from '@/hooks/useApi' import useNotifier from '@/utils/useNotifier' import { generateRandomGradient, formatDataGridRows } from '@/utils/genericHelper' import { HIDE_CANVAS_DIALOG, SHOW_CANVAS_DIALOG } from '@/store/actions' -import ExportAsTemplateDialog from '@/ui-component/dialog/ExportAsTemplateDialog' const exampleAPIFunc = `/* * You can use any libraries imported in Flowise @@ -432,7 +434,8 @@ const ToolDialog = ({ show, dialogProps, onUseTemplate, onCancel, onConfirm, set {dialogProps.type === 'EDIT' && ( <> - - + )} @@ -561,23 +569,26 @@ const ToolDialog = ({ show, dialogProps, onUseTemplate, onCancel, onConfirm, set {dialogProps.type === 'EDIT' && ( - deleteTool()}> + deleteTool()}> Delete - + )} {dialogProps.type === 'TEMPLATE' && ( - - Use Template - + + + Use Template + + )} {dialogProps.type !== 'TEMPLATE' && ( - (dialogProps.type === 'ADD' || dialogProps.type === 'IMPORT' ? 
addNewTool() : saveTool())} > {dialogProps.confirmButtonName} - + )} diff --git a/packages/ui/src/views/tools/index.jsx b/packages/ui/src/views/tools/index.jsx index 1a2e5d803..10dff4351 100644 --- a/packages/ui/src/views/tools/index.jsx +++ b/packages/ui/src/views/tools/index.jsx @@ -1,43 +1,64 @@ import { useEffect, useState, useRef } from 'react' // material-ui -import { Box, Stack, Button, ButtonGroup, Skeleton, ToggleButtonGroup, ToggleButton } from '@mui/material' +import { Box, Stack, ButtonGroup, Skeleton, ToggleButtonGroup, ToggleButton } from '@mui/material' +import { useTheme } from '@mui/material/styles' // project imports import MainCard from '@/ui-component/cards/MainCard' import ItemCard from '@/ui-component/cards/ItemCard' -import { gridSpacing } from '@/store/constant' -import ToolEmptySVG from '@/assets/images/tools_empty.svg' -import { StyledButton } from '@/ui-component/button/StyledButton' import ToolDialog from './ToolDialog' +import ViewHeader from '@/layout/MainLayout/ViewHeader' +import ErrorBoundary from '@/ErrorBoundary' import { ToolsTable } from '@/ui-component/table/ToolsListTable' +import { PermissionButton, StyledPermissionButton } from '@/ui-component/button/RBACButtons' +import TablePagination, { DEFAULT_ITEMS_PER_PAGE } from '@/ui-component/pagination/TablePagination' // API import toolsApi from '@/api/tools' // Hooks import useApi from '@/hooks/useApi' +import { useError } from '@/store/context/ErrorContext' +import { gridSpacing } from '@/store/constant' // icons import { IconPlus, IconFileUpload, IconLayoutGrid, IconList } from '@tabler/icons-react' -import ViewHeader from '@/layout/MainLayout/ViewHeader' -import ErrorBoundary from '@/ErrorBoundary' -import { useTheme } from '@mui/material/styles' +import ToolEmptySVG from '@/assets/images/tools_empty.svg' -// ==============================|| CHATFLOWS ||============================== // +// ==============================|| TOOLS ||============================== // const Tools 
= () => { const theme = useTheme() const getAllToolsApi = useApi(toolsApi.getAllTools) + const { error, setError } = useError() const [isLoading, setLoading] = useState(true) - const [error, setError] = useState(null) const [showDialog, setShowDialog] = useState(false) const [dialogProps, setDialogProps] = useState({}) const [view, setView] = useState(localStorage.getItem('toolsDisplayStyle') || 'card') const inputRef = useRef(null) + /* Table Pagination */ + const [currentPage, setCurrentPage] = useState(1) + const [pageLimit, setPageLimit] = useState(DEFAULT_ITEMS_PER_PAGE) + const [total, setTotal] = useState(0) + + const onChange = (page, pageLimit) => { + setCurrentPage(page) + setPageLimit(pageLimit) + refresh(page, pageLimit) + } + + const refresh = (page, limit) => { + const params = { + page: page || currentPage, + limit: limit || pageLimit + } + getAllToolsApi.request(params) + } + const handleChange = (event, nextView) => { if (nextView === null) return localStorage.setItem('toolsDisplayStyle', nextView) @@ -101,7 +122,7 @@ const Tools = () => { const onConfirm = () => { setShowDialog(false) - getAllToolsApi.request() + refresh(currentPage, pageLimit) } const [search, setSearch] = useState('') @@ -116,8 +137,7 @@ const Tools = () => { } useEffect(() => { - getAllToolsApi.request() - + refresh(currentPage, pageLimit) // eslint-disable-next-line react-hooks/exhaustive-deps }, []) @@ -126,10 +146,10 @@ const Tools = () => { }, [getAllToolsApi.loading]) useEffect(() => { - if (getAllToolsApi.error) { - setError(getAllToolsApi.error) + if (getAllToolsApi.data) { + setTotal(getAllToolsApi.data.total) } - }, [getAllToolsApi.error]) + }, [getAllToolsApi.data]) return ( <> @@ -149,6 +169,7 @@ const Tools = () => { sx={{ borderRadius: 2, maxHeight: 40 }} value={view} color='primary' + disabled={total === 0} exclusive onChange={handleChange} > @@ -178,14 +199,15 @@ const Tools = () => { - + { /> - } sx={{ borderRadius: 2, height: 40 }} > Create - + - {!view || view 
=== 'card' ? ( + {isLoading && ( + + + + + + )} + {!isLoading && total > 0 && ( <> - {isLoading ? ( + {!view || view === 'card' ? ( - - - + {getAllToolsApi.data?.data?.filter(filterTools).map((data, index) => ( + edit(data)} /> + ))} ) : ( - - {getAllToolsApi.data && - getAllToolsApi.data - ?.filter(filterTools) - .map((data, index) => edit(data)} />)} - + )} + {/* Pagination and Page Size Controls */} + - ) : ( - )} - {!isLoading && (!getAllToolsApi.data || getAllToolsApi.data.length === 0) && ( + {!isLoading && total === 0 && ( { + const portalElement = document.getElementById('portal') + const currentUser = useSelector((state) => state.auth.user) + + const dispatch = useDispatch() + + useNotifier() + + const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) + const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) + + const [userName, setUserName] = useState('') + const [userEmail, setUserEmail] = useState('') + const [status, setStatus] = useState('active') + const [user, setUser] = useState({}) + + useEffect(() => { + if (dialogProps.type === 'EDIT' && dialogProps.data) { + setUser(dialogProps.data.user) + setUserEmail(dialogProps.data.user.email) + setUserName(dialogProps.data.user.name) + setStatus(dialogProps.data.user.status) + } + + return () => { + setUserEmail('') + setUserName('') + setStatus('active') + setUser({}) + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [dialogProps]) + + useEffect(() => { + if (show) dispatch({ type: SHOW_CANVAS_DIALOG }) + else dispatch({ type: HIDE_CANVAS_DIALOG }) + return () => dispatch({ type: HIDE_CANVAS_DIALOG }) + }, [show, dispatch]) + + const updateUser = async () => { + try { + const saveObj = { + userId: user.id, + organizationId: currentUser.activeOrganizationId, + status: status + } + + const saveResp = await userApi.updateOrganizationUser(saveObj) + if (saveResp.data) { + enqueueSnackbar({ + message: 'User Details Updated', + options: { + key: new 
Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + onConfirm(saveResp.data.id) + } + } catch (error) { + setError(err) + enqueueSnackbar({ + message: `Failed to update User: ${ + typeof error.response.data === 'object' ? error.response.data.message : error.response.data + }`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + onCancel() + } + } + + const component = show ? ( + + +
    + + {'Edit User'} +
    +
    + + +
    + + Email * + + +
    +
    + setUserEmail(e.target.value)} + value={userEmail ?? ''} + /> +
    + +
    + Name + +
    +
    + setUserName(e.target.value)} + value={userName ?? ''} + /> +
    + +
    + + Account Status * + +
    +
    + setStatus(newValue)} + value={status ?? 'choose an option'} + id='dropdown_status' + /> + {dialogProps?.data?.isOrgOwner && ( + + Cannot change status of the organization owner! + + )} +
    +
    + + updateUser()} id='btn_confirmInviteUser'> + {dialogProps.confirmButtonName} + + + +
    + ) : null + + return createPortal(component, portalElement) +} + +EditUserDialog.propTypes = { + show: PropTypes.bool, + dialogProps: PropTypes.object, + onCancel: PropTypes.func, + onConfirm: PropTypes.func, + setError: PropTypes.func +} + +export default EditUserDialog diff --git a/packages/ui/src/views/users/index.jsx b/packages/ui/src/views/users/index.jsx new file mode 100644 index 000000000..ce6ad2e03 --- /dev/null +++ b/packages/ui/src/views/users/index.jsx @@ -0,0 +1,539 @@ +import React, { useEffect, useState } from 'react' +import { useDispatch, useSelector } from 'react-redux' +import moment from 'moment' +import * as PropTypes from 'prop-types' + +// material-ui +import { + Button, + Box, + Skeleton, + Stack, + Table, + TableBody, + TableContainer, + TableHead, + TableRow, + Paper, + useTheme, + Chip, + Drawer, + Typography, + CircularProgress +} from '@mui/material' + +// project imports +import MainCard from '@/ui-component/cards/MainCard' +import ConfirmDialog from '@/ui-component/dialog/ConfirmDialog' +import ViewHeader from '@/layout/MainLayout/ViewHeader' +import ErrorBoundary from '@/ErrorBoundary' +import EditUserDialog from '@/views/users/EditUserDialog' +import { StyledTableCell, StyledTableRow } from '@/ui-component/table/TableStyles' +import InviteUsersDialog from '@/ui-component/dialog/InviteUsersDialog' +import { PermissionIconButton, StyledPermissionButton } from '@/ui-component/button/RBACButtons' + +// API +import userApi from '@/api/user' + +// Hooks +import useApi from '@/hooks/useApi' +import useConfirm from '@/hooks/useConfirm' + +// utils +import useNotifier from '@/utils/useNotifier' + +// Icons +import { IconTrash, IconEdit, IconX, IconPlus, IconUser, IconEyeOff, IconEye, IconUserStar } from '@tabler/icons-react' +import users_emptySVG from '@/assets/images/users_empty.svg' + +// store +import { useError } from '@/store/context/ErrorContext' +import { enqueueSnackbar as enqueueSnackbarAction, closeSnackbar as 
closeSnackbarAction } from '@/store/actions' + +function ShowUserRow(props) { + const customization = useSelector((state) => state.customization) + + const [open, setOpen] = useState(false) + const [userRoles, setUserRoles] = useState([]) + + const theme = useTheme() + + const getWorkspacesByUserId = useApi(userApi.getWorkspacesByOrganizationIdUserId) + + const handleViewUserRoles = (userId, organizationId) => { + setOpen(!open) + getWorkspacesByUserId.request(organizationId, userId) + } + + useEffect(() => { + if (getWorkspacesByUserId.data) { + setUserRoles(getWorkspacesByUserId.data) + } + }, [getWorkspacesByUserId.data]) + + useEffect(() => { + if (!open) { + setOpen(false) + setUserRoles([]) + } + }, [open]) + + const currentUser = useSelector((state) => state.auth.user) + + return ( + + + +
    +
    + {props?.row?.isOrgOwner ? ( + + ) : ( + + )} +
    +
    +
    + + {props.row.user.name ?? ''} + {props.row.user.email && ( + <> +
    + {props.row.user.email} + + )} + + {props.row.isOrgOwner && ( + <> + {' '} +
    + {' '} + + )} +
    + + {props.row.roleCount} + handleViewUserRoles(props.row.userId, props.row.organizationId)} + > + {props.row.roleCount > 0 && open ? : } + + + + {'ACTIVE' === props.row.status.toUpperCase() && } + {'INVITED' === props.row.status.toUpperCase() && } + {'INACTIVE' === props.row.status.toUpperCase() && } + + {!props.row.lastLogin ? 'Never' : moment(props.row.lastLogin).format('DD/MM/YYYY HH:mm')} + + {props.row.status.toUpperCase() === 'INVITED' && ( + props.onEditClick(props.row)} + > + + + )} + {!props.row.isOrgOwner && + props.row.userId !== currentUser.id && + (props.deletingUserId === props.row.user.id ? ( + + ) : ( + props.onDeleteClick(props.row.user)} + > + + + ))} + +
    + setOpen(false)} sx={{ minWidth: 320 }}> + + + Assigned Roles + + + + + + Role + Workspace + + + + {userRoles.map((item, index) => ( + + {item.role.name} + + {item.workspace.name} + {/* {assignment.active && } */} + + + ))} + +
    +
    +
    +
    +
    + ) +} + +ShowUserRow.propTypes = { + row: PropTypes.any, + onDeleteClick: PropTypes.func, + onEditClick: PropTypes.func, + open: PropTypes.bool, + theme: PropTypes.any, + deletingUserId: PropTypes.string +} + +// ==============================|| Users ||============================== // + +const Users = () => { + const theme = useTheme() + const customization = useSelector((state) => state.customization) + const dispatch = useDispatch() + useNotifier() + const { error, setError } = useError() + const currentUser = useSelector((state) => state.auth.user) + + const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) + const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) + + const [isLoading, setLoading] = useState(true) + const [showInviteDialog, setShowInviteDialog] = useState(false) + const [showEditDialog, setShowEditDialog] = useState(false) + const [inviteDialogProps, setInviteDialogProps] = useState({}) + const [users, setUsers] = useState([]) + const [search, setSearch] = useState('') + const [deletingUserId, setDeletingUserId] = useState(null) + + const { confirm } = useConfirm() + + const getAllUsersByOrganizationIdApi = useApi(userApi.getAllUsersByOrganizationId) + + const onSearchChange = (event) => { + setSearch(event.target.value) + } + + function filterUsers(data) { + return ( + data.user.name?.toLowerCase().indexOf(search.toLowerCase()) > -1 || + data.user.email.toLowerCase().indexOf(search.toLowerCase()) > -1 + ) + } + + const addNew = () => { + const dialogProp = { + type: 'ADD', + cancelButtonName: 'Cancel', + confirmButtonName: 'Send Invite', + data: null + } + setInviteDialogProps(dialogProp) + setShowInviteDialog(true) + } + + const edit = (user) => { + if (user.status.toUpperCase() === 'INVITED') { + editInvite(user) + } else { + editUser(user) + } + } + + const editInvite = (user) => { + const dialogProp = { + type: 'EDIT', + cancelButtonName: 'Cancel', + confirmButtonName: 'Update Invite', + data: 
user + } + setInviteDialogProps(dialogProp) + setShowInviteDialog(true) + } + + const editUser = (user) => { + const dialogProp = { + type: 'EDIT', + cancelButtonName: 'Cancel', + confirmButtonName: 'Save', + data: user + } + setInviteDialogProps(dialogProp) + setShowEditDialog(true) + } + + const deleteUser = async (user) => { + const confirmPayload = { + title: `Delete`, + description: `Remove ${user.name ?? user.email} from organization?`, + confirmButtonName: 'Delete', + cancelButtonName: 'Cancel' + } + const isConfirmed = await confirm(confirmPayload) + + if (isConfirmed) { + try { + setDeletingUserId(user.id) + const deleteResp = await userApi.deleteOrganizationUser(currentUser.activeOrganizationId, user.id) + if (deleteResp.data) { + enqueueSnackbar({ + message: 'User removed from organization successfully', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + onConfirm() + } + } catch (error) { + enqueueSnackbar({ + message: `Failed to delete User: ${ + typeof error.response.data === 'object' ? 
error.response.data.message : error.response.data + }`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + } finally { + setDeletingUserId(null) + } + } + } + + const onConfirm = () => { + setShowInviteDialog(false) + setShowEditDialog(false) + getAllUsersByOrganizationIdApi.request(currentUser.activeOrganizationId) + } + + useEffect(() => { + getAllUsersByOrganizationIdApi.request(currentUser.activeOrganizationId) + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []) + + useEffect(() => { + setLoading(getAllUsersByOrganizationIdApi.loading) + }, [getAllUsersByOrganizationIdApi.loading]) + + useEffect(() => { + if (getAllUsersByOrganizationIdApi.error) { + setError(getAllUsersByOrganizationIdApi.error) + } + }, [getAllUsersByOrganizationIdApi.error, setError]) + + useEffect(() => { + if (getAllUsersByOrganizationIdApi.data) { + const users = getAllUsersByOrganizationIdApi.data || [] + const orgAdmin = users.find((user) => user.isOrgOwner === true) + if (orgAdmin) { + users.splice(users.indexOf(orgAdmin), 1) + users.unshift(orgAdmin) + } + setUsers(users) + } + }, [getAllUsersByOrganizationIdApi.data]) + + return ( + <> + + {error ? ( + + ) : ( + + + } + id='btn_createUser' + > + Invite User + + + {!isLoading && users.length === 0 ? ( + + + users_emptySVG + +
    No Users Yet
    +
    + ) : ( + <> + + + + + + +   + Email/Name + Assigned Roles + Status + Last Login + + + + + {isLoading ? ( + <> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ) : ( + <> + {users.filter(filterUsers).map((item, index) => ( + + ))} + + )} + +
    +
    +
    +
    + + )} +
    + )} +
    + {showInviteDialog && ( + setShowInviteDialog(false)} + onConfirm={onConfirm} + > + )} + {showEditDialog && ( + setShowEditDialog(false)} + onConfirm={onConfirm} + setError={setError} + > + )} + + + ) +} + +export default Users diff --git a/packages/ui/src/views/variables/AddEditVariableDialog.jsx b/packages/ui/src/views/variables/AddEditVariableDialog.jsx index 9647ffc04..dd82c7560 100644 --- a/packages/ui/src/views/variables/AddEditVariableDialog.jsx +++ b/packages/ui/src/views/variables/AddEditVariableDialog.jsx @@ -114,7 +114,7 @@ const AddEditVariableDialog = ({ show, dialogProps, onCancel, onConfirm, setErro if (setError) setError(err) enqueueSnackbar({ message: `Failed to add new Variable: ${ - typeof error.response.data === 'object' ? error.response.data.message : error.response.data + typeof err.response.data === 'object' ? err.response.data.message : err.response.data }`, options: { key: new Date().getTime() + Math.random(), @@ -155,11 +155,11 @@ const AddEditVariableDialog = ({ show, dialogProps, onCancel, onConfirm, setErro }) onConfirm(saveResp.data.id) } - } catch (error) { + } catch (err) { if (setError) setError(err) enqueueSnackbar({ message: `Failed to save Variable: ${ - typeof error.response.data === 'object' ? error.response.data.message : error.response.data + typeof err.response.data === 'object' ? 
err.response.data.message : err.response.data }`, options: { key: new Date().getTime() + Math.random(), diff --git a/packages/ui/src/views/variables/index.jsx b/packages/ui/src/views/variables/index.jsx index 54d456e89..01e1d3225 100644 --- a/packages/ui/src/views/variables/index.jsx +++ b/packages/ui/src/views/variables/index.jsx @@ -25,13 +25,15 @@ import { // project imports import MainCard from '@/ui-component/cards/MainCard' -import { StyledButton } from '@/ui-component/button/StyledButton' import ConfirmDialog from '@/ui-component/dialog/ConfirmDialog' -import { refreshVariablesCache } from '@/ui-component/input/suggestionOption' import AddEditVariableDialog from './AddEditVariableDialog' import HowToUseVariablesDialog from './HowToUseVariablesDialog' import ViewHeader from '@/layout/MainLayout/ViewHeader' import ErrorBoundary from '@/ErrorBoundary' +import { StyledPermissionButton } from '@/ui-component/button/RBACButtons' +import { Available } from '@/ui-component/rbac/available' +import { refreshVariablesCache } from '@/ui-component/input/suggestionOption' +import TablePagination, { DEFAULT_ITEMS_PER_PAGE } from '@/ui-component/pagination/TablePagination' // API import variablesApi from '@/api/variables' @@ -47,6 +49,9 @@ import useNotifier from '@/utils/useNotifier' import { IconTrash, IconEdit, IconX, IconPlus, IconVariable } from '@tabler/icons-react' import VariablesEmptySVG from '@/assets/images/variables_empty.svg' +// const +import { useError } from '@/store/context/ErrorContext' + const StyledTableCell = styled(TableCell)(({ theme }) => ({ borderColor: theme.palette.grey[900] + 25, @@ -73,12 +78,12 @@ const Variables = () => { const customization = useSelector((state) => state.customization) const dispatch = useDispatch() useNotifier() + const { error, setError } = useError() const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) const [isLoading, 
setLoading] = useState(true) - const [error, setError] = useState(null) const [showVariableDialog, setShowVariableDialog] = useState(false) const [variableDialogProps, setVariableDialogProps] = useState({}) const [variables, setVariables] = useState([]) @@ -87,8 +92,27 @@ const Variables = () => { const { confirm } = useConfirm() const getAllVariables = useApi(variablesApi.getAllVariables) - const [search, setSearch] = useState('') + + /* Table Pagination */ + const [currentPage, setCurrentPage] = useState(1) + const [pageLimit, setPageLimit] = useState(DEFAULT_ITEMS_PER_PAGE) + const [total, setTotal] = useState(0) + + const onChange = (page, pageLimit) => { + setCurrentPage(page) + setPageLimit(pageLimit) + refresh(page, pageLimit) + } + + const refresh = (page, limit) => { + const params = { + page: page || currentPage, + limit: limit || pageLimit + } + getAllVariables.request(params) + } + const onSearchChange = (event) => { setSearch(event.target.value) } @@ -168,12 +192,12 @@ const Variables = () => { const onConfirm = () => { setShowVariableDialog(false) - getAllVariables.request() + refresh(currentPage, pageLimit) refreshVariablesCache() } useEffect(() => { - getAllVariables.request() + refresh(currentPage, pageLimit) // eslint-disable-next-line react-hooks/exhaustive-deps }, []) @@ -181,15 +205,10 @@ const Variables = () => { setLoading(getAllVariables.loading) }, [getAllVariables.loading]) - useEffect(() => { - if (getAllVariables.error) { - setError(getAllVariables.error) - } - }, [getAllVariables.error]) - useEffect(() => { if (getAllVariables.data) { - setVariables(getAllVariables.data) + setVariables(getAllVariables.data.data) + setTotal(getAllVariables.data.total) } }, [getAllVariables.data]) @@ -210,7 +229,8 @@ const Variables = () => { - { id='btn_createVariable' > Add Variable - + {!isLoading && variables.length === 0 ? ( @@ -232,146 +252,169 @@ const Variables = () => {
    No Variables Yet
    ) : ( - - - - - Name - Value - Type - Last Updated - Created - - - - - - {isLoading ? ( - <> - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ) : ( - <> - {variables.filter(filterVariables).map((variable, index) => ( - - -
    + <> + +
    + + + Name + Value + Type + Last Updated + Created + + + + + + + + + + {isLoading ? ( + <> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ) : ( + <> + {variables.filter(filterVariables).map((variable, index) => ( + +
    - + > + +
    + {variable.name} - {variable.name} - -
    - {variable.value} - - - - - {moment(variable.updatedDate).format('MMMM Do, YYYY HH:mm:ss')} - - - {moment(variable.createdDate).format('MMMM Do, YYYY HH:mm:ss')} - - - edit(variable)}> - - - - - deleteVariable(variable)} - > - - - -
    - ))} - - )} -
    -
    -
    + + {variable.value} + + + + + {moment(variable.updatedDate).format('MMMM Do, YYYY HH:mm:ss')} + + + {moment(variable.createdDate).format('MMMM Do, YYYY HH:mm:ss')} + + + + edit(variable)}> + + + + + + + deleteVariable(variable)} + > + + + + + + ))} + + )} + + + + {/* Pagination and Page Size Controls */} + + )}
    )} diff --git a/packages/ui/src/views/vectorstore/UpsertHistoryDialog.jsx b/packages/ui/src/views/vectorstore/UpsertHistoryDialog.jsx index 62f86b562..9ecbe5c33 100644 --- a/packages/ui/src/views/vectorstore/UpsertHistoryDialog.jsx +++ b/packages/ui/src/views/vectorstore/UpsertHistoryDialog.jsx @@ -279,6 +279,7 @@ const UpsertHistoryDialog = ({ show, dialogProps, onCancel }) => { ) } }) + setSelected([]) } } diff --git a/packages/ui/src/views/workspace/AddEditWorkspaceDialog.jsx b/packages/ui/src/views/workspace/AddEditWorkspaceDialog.jsx new file mode 100644 index 000000000..9e49e9c47 --- /dev/null +++ b/packages/ui/src/views/workspace/AddEditWorkspaceDialog.jsx @@ -0,0 +1,261 @@ +import { createPortal } from 'react-dom' +import PropTypes from 'prop-types' +import { useState, useEffect } from 'react' +import { useDispatch, useSelector } from 'react-redux' + +// Material +import { Button, Dialog, DialogActions, DialogContent, DialogTitle, Box, Typography, OutlinedInput } from '@mui/material' + +// Project imports +import { StyledButton } from '@/ui-component/button/StyledButton' +import ConfirmDialog from '@/ui-component/dialog/ConfirmDialog' + +// Icons +import { IconX, IconUsersGroup } from '@tabler/icons-react' + +// API +import workspaceApi from '@/api/workspace' + +// utils +import useNotifier from '@/utils/useNotifier' + +// Store +import { store } from '@/store' +import { workspaceNameUpdated } from '@/store/reducers/authSlice' + +// const +import { + enqueueSnackbar as enqueueSnackbarAction, + closeSnackbar as closeSnackbarAction, + HIDE_CANVAS_DIALOG, + SHOW_CANVAS_DIALOG +} from '@/store/actions' + +const AddEditWorkspaceDialog = ({ show, dialogProps, onCancel, onConfirm }) => { + const portalElement = document.getElementById('portal') + + const dispatch = useDispatch() + + // ==============================|| Snackbar ||============================== // + + useNotifier() + + const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) 
+ const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) + + const [workspaceName, setWorkspaceName] = useState('') + const [workspaceDescription, setWorkspaceDescription] = useState('') + const [dialogType, setDialogType] = useState('ADD') + const [workspace, setWorkspace] = useState({}) + const currentUser = useSelector((state) => state.auth.user) + + useEffect(() => { + if (dialogProps.type === 'EDIT' && dialogProps.data) { + setWorkspaceName(dialogProps.data.name) + setWorkspaceDescription(dialogProps.data.description) + setDialogType('EDIT') + setWorkspace(dialogProps.data) + } else if (dialogProps.type === 'ADD') { + setWorkspaceName('') + setWorkspaceDescription('') + setDialogType('ADD') + setWorkspace({}) + } + + return () => { + setWorkspaceName('') + setWorkspaceDescription('') + setDialogType('ADD') + setWorkspace({}) + } + }, [dialogProps]) + + useEffect(() => { + if (show) dispatch({ type: SHOW_CANVAS_DIALOG }) + else dispatch({ type: HIDE_CANVAS_DIALOG }) + return () => dispatch({ type: HIDE_CANVAS_DIALOG }) + }, [show, dispatch]) + + const addNewWorkspace = async () => { + if (workspaceName === 'Default Workspace' || workspaceName === 'Personal Workspace') { + enqueueSnackbar({ + message: 'Workspace name cannot be Default Workspace or Personal Workspace - this is a reserved name', + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + return + } + try { + const obj = { + name: workspaceName, + description: workspaceDescription, + createdBy: currentUser.id, + organizationId: currentUser.activeOrganizationId, + existingWorkspaceId: currentUser.activeWorkspaceId // this is used to inherit the current role + } + const createResp = await workspaceApi.createWorkspace(obj) + if (createResp.data) { + enqueueSnackbar({ + message: 'New Workspace added', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + 
}) + onConfirm(createResp.data.id) + } + } catch (error) { + enqueueSnackbar({ + message: `Failed to add new Workspace: ${ + typeof error.response.data === 'object' ? error.response.data.message : error.response.data + }`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + onCancel() + } + } + + const saveWorkspace = async () => { + try { + const saveObj = { + id: workspace.id, + name: workspaceName, + description: workspaceDescription, + updatedBy: currentUser.id + } + + const saveResp = await workspaceApi.updateWorkspace(saveObj) + if (saveResp.data) { + store.dispatch(workspaceNameUpdated(saveResp.data)) + enqueueSnackbar({ + message: 'Workspace saved', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + onConfirm() + } + } catch (error) { + enqueueSnackbar({ + message: `Failed to save Workspace: ${ + typeof error.response.data === 'object' ? error.response.data.message : error.response.data + }`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + onCancel() + } + } + + const component = show ? ( + + +
    + + {dialogProps.type === 'ADD' ? 'Add Workspace' : 'Edit Workspace'} +
    +
    + + +
    + + Name * + +
    +
    + setWorkspaceName(e.target.value)} + value={workspaceName ?? ''} + /> +
    + +
    + Description +
    +
    + setWorkspaceDescription(e.target.value)} + value={workspaceDescription ?? ''} + /> +
    +
    + + + (dialogType === 'ADD' ? addNewWorkspace() : saveWorkspace())} + > + {dialogProps.confirmButtonName} + + + +
    + ) : null + + return createPortal(component, portalElement) +} + +AddEditWorkspaceDialog.propTypes = { + show: PropTypes.bool, + dialogProps: PropTypes.object, + onCancel: PropTypes.func, + onConfirm: PropTypes.func +} + +export default AddEditWorkspaceDialog diff --git a/packages/ui/src/views/workspace/EditWorkspaceUserRoleDialog.jsx b/packages/ui/src/views/workspace/EditWorkspaceUserRoleDialog.jsx new file mode 100644 index 000000000..5dc6ba207 --- /dev/null +++ b/packages/ui/src/views/workspace/EditWorkspaceUserRoleDialog.jsx @@ -0,0 +1,211 @@ +import { createPortal } from 'react-dom' +import PropTypes from 'prop-types' +import { useState, useEffect } from 'react' +import { useDispatch, useSelector } from 'react-redux' + +// Material +import { + Button, + Dialog, + DialogActions, + DialogContent, + DialogTitle, + Box, + Typography, + Autocomplete, + TextField, + styled, + Popper +} from '@mui/material' + +// Project imports +import { StyledButton } from '@/ui-component/button/StyledButton' +import ConfirmDialog from '@/ui-component/dialog/ConfirmDialog' + +// Icons +import { IconX, IconUser } from '@tabler/icons-react' + +// API +import roleApi from '@/api/role' +import workspaceApi from '@/api/workspace' + +// utils +import useNotifier from '@/utils/useNotifier' + +// store +import { HIDE_CANVAS_DIALOG, SHOW_CANVAS_DIALOG } from '@/store/actions' +import { enqueueSnackbar as enqueueSnackbarAction, closeSnackbar as closeSnackbarAction } from '@/store/actions' +import useApi from '@/hooks/useApi' +import { autocompleteClasses } from '@mui/material/Autocomplete' + +const StyledPopper = styled(Popper)({ + boxShadow: '0px 8px 10px -5px rgb(0 0 0 / 20%), 0px 16px 24px 2px rgb(0 0 0 / 14%), 0px 6px 30px 5px rgb(0 0 0 / 12%)', + borderRadius: '10px', + [`& .${autocompleteClasses.listbox}`]: { + boxSizing: 'border-box', + '& ul': { + padding: 10, + margin: 10 + } + } +}) + +const EditWorkspaceUserRoleDialog = ({ show, dialogProps, onCancel, onConfirm }) => { + 
const portalElement = document.getElementById('portal') + const currentUser = useSelector((state) => state.auth.user) + + const dispatch = useDispatch() + + useNotifier() + + const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) + const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) + + const [userEmail, setUserEmail] = useState('') + const [user, setUser] = useState({}) + + const [availableRoles, setAvailableRoles] = useState([]) + const [selectedRole, setSelectedRole] = useState('') + const getAllRolesApi = useApi(roleApi.getAllRolesByOrganizationId) + + useEffect(() => { + if (getAllRolesApi.data) { + const roles = getAllRolesApi.data.map((role) => ({ + id: role.id, + name: role.name, + label: role.name, + description: role.description + })) + setAvailableRoles(roles) + if (dialogProps.type === 'EDIT' && dialogProps.data && dialogProps.data.role && dialogProps.data.role.name) { + const userActiveRole = roles.find((role) => role.name === dialogProps.data.role.name) + if (userActiveRole) setSelectedRole(userActiveRole) + } + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getAllRolesApi.data]) + + useEffect(() => { + if (dialogProps.data) { + getAllRolesApi.request(currentUser.activeOrganizationId) + setUser(dialogProps.data.user) + setUserEmail(dialogProps.data.user.email) + } + + return () => { + setUserEmail('') + setUser({}) + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [dialogProps]) + + useEffect(() => { + if (show) dispatch({ type: SHOW_CANVAS_DIALOG }) + else dispatch({ type: HIDE_CANVAS_DIALOG }) + return () => dispatch({ type: HIDE_CANVAS_DIALOG }) + }, [show, dispatch]) + + const updateUser = async () => { + try { + const saveObj = { + userId: user.id, + workspaceId: dialogProps.data.workspaceId, + organizationId: currentUser.activeOrganizationId, + roleId: selectedRole.id, + updatedBy: currentUser.id + } + + const saveResp = await 
workspaceApi.updateWorkspaceUserRole(saveObj) + if (saveResp.data) { + enqueueSnackbar({ + message: 'WorkspaceUser Details Updated', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + onConfirm(saveResp.data.id) + } + } catch (error) { + enqueueSnackbar({ + message: `Failed to update WorkspaceUser: ${ + typeof error.response.data === 'object' ? error.response.data.message : error.response.data + }`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + } + } + + const handleRoleChange = (event, newRole) => { + setSelectedRole(newRole) + } + + const component = show ? ( + + +
    + + {'Change Workspace Role - '} {userEmail || ''} {user.name ? `(${user.name})` : ''} +
    +
    + + +
    + + New Role to Assign * + +
    +
    + option.label || ''} + options={availableRoles} + renderInput={(params) => } + value={selectedRole} + PopperComponent={StyledPopper} + /> +
    +
    + + updateUser()} id='btn_confirmEditUser'> + {dialogProps.confirmButtonName} + + + +
    + ) : null + + return createPortal(component, portalElement) +} + +EditWorkspaceUserRoleDialog.propTypes = { + show: PropTypes.bool, + dialogProps: PropTypes.object, + onCancel: PropTypes.func, + onConfirm: PropTypes.func +} + +export default EditWorkspaceUserRoleDialog diff --git a/packages/ui/src/views/workspace/WorkspaceUsers.jsx b/packages/ui/src/views/workspace/WorkspaceUsers.jsx new file mode 100644 index 000000000..e1024ca8c --- /dev/null +++ b/packages/ui/src/views/workspace/WorkspaceUsers.jsx @@ -0,0 +1,556 @@ +import { useEffect, useState } from 'react' +import { useDispatch, useSelector } from 'react-redux' +import moment from 'moment' +import { useNavigate } from 'react-router-dom' + +// material-ui +import { + IconButton, + Checkbox, + Skeleton, + Box, + TableRow, + TableContainer, + Paper, + Table, + TableHead, + TableBody, + Button, + Stack, + Chip +} from '@mui/material' +import { useTheme } from '@mui/material/styles' + +// project imports +import MainCard from '@/ui-component/cards/MainCard' +import { StyledTableCell, StyledTableRow } from '@/ui-component/table/TableStyles' +import ConfirmDialog from '@/ui-component/dialog/ConfirmDialog' +import ErrorBoundary from '@/ErrorBoundary' +import ViewHeader from '@/layout/MainLayout/ViewHeader' +import { PermissionButton, StyledPermissionButton } from '@/ui-component/button/RBACButtons' +import InviteUsersDialog from '@/ui-component/dialog/InviteUsersDialog' +import EditWorkspaceUserRoleDialog from '@/views/workspace/EditWorkspaceUserRoleDialog' + +// API +import userApi from '@/api/user' +import workspaceApi from '@/api/workspace' + +// Hooks +import useApi from '@/hooks/useApi' +import useNotifier from '@/utils/useNotifier' +import useConfirm from '@/hooks/useConfirm' + +// icons +import empty_datasetSVG from '@/assets/images/empty_datasets.svg' +import { IconEdit, IconX, IconUnlink, IconUserPlus } from '@tabler/icons-react' + +// store +import { useError } from '@/store/context/ErrorContext' 
+import { closeSnackbar as closeSnackbarAction, enqueueSnackbar as enqueueSnackbarAction } from '@/store/actions' + +const WorkspaceDetails = () => { + const theme = useTheme() + const customization = useSelector((state) => state.customization) + const currentUser = useSelector((state) => state.auth.user) + const navigate = useNavigate() + + const dispatch = useDispatch() + useNotifier() + const { error, setError } = useError() + + const [search, setSearch] = useState('') + const [workspace, setWorkspace] = useState({}) + const [workspaceUsers, setWorkspaceUsers] = useState([]) + const [isLoading, setLoading] = useState(true) + const [usersSelected, setUsersSelected] = useState([]) + + const [showAddUserDialog, setShowAddUserDialog] = useState(false) + const [dialogProps, setDialogProps] = useState({}) + const [showWorkspaceUserRoleDialog, setShowWorkspaceUserRoleDialog] = useState(false) + const [workspaceUserRoleDialogProps, setWorkspaceUserRoleDialogProps] = useState({}) + + const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) + const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) + + const { confirm } = useConfirm() + + const getAllUsersByWorkspaceIdApi = useApi(userApi.getAllUsersByWorkspaceId) + const getWorkspaceByIdApi = useApi(workspaceApi.getWorkspaceById) + + const URLpath = document.location.pathname.toString().split('/') + const workspaceId = URLpath[URLpath.length - 1] === 'workspace-users' ? 
'' : URLpath[URLpath.length - 1] + + const onUsersSelectAllClick = (event) => { + if (event.target.checked) { + const newSelected = (workspaceUsers || []) + .filter((n) => !n.isOrgOwner) + .map((n) => ({ + userId: n.userId, + name: n.user.name, + email: n.user.email + })) + setUsersSelected(newSelected) + return + } + setUsersSelected([]) + } + + const handleUserSelect = (event, user) => { + const selectedIndex = usersSelected.findIndex((item) => item.userId === user.userId) + let newSelected = [] + + if (selectedIndex === -1) { + newSelected = newSelected.concat(usersSelected, { + userId: user.userId, + name: user.user.name, + email: user.user.email + }) + } else if (selectedIndex === 0) { + newSelected = newSelected.concat(usersSelected.slice(1)) + } else if (selectedIndex === usersSelected.length - 1) { + newSelected = newSelected.concat(usersSelected.slice(0, -1)) + } else if (selectedIndex > 0) { + newSelected = newSelected.concat(usersSelected.slice(0, selectedIndex), usersSelected.slice(selectedIndex + 1)) + } + setUsersSelected(newSelected) + } + + const isUserSelected = (userId) => usersSelected.findIndex((item) => item.userId === userId) !== -1 + + const addUser = () => { + const dialogProp = { + type: 'ADD', + cancelButtonName: 'Cancel', + confirmButtonName: 'Send Invite', + data: workspace + } + setDialogProps(dialogProp) + setShowAddUserDialog(true) + } + + const onEditClick = (user) => { + if (user.status.toUpperCase() === 'INVITED') { + editInvite(user) + } else { + editUser(user) + } + } + + const editInvite = (user) => { + const dialogProp = { + type: 'EDIT', + cancelButtonName: 'Cancel', + confirmButtonName: 'Update Invite', + data: { + ...user, + isWorkspaceUser: true + }, + disableWorkspaceSelection: true + } + setDialogProps(dialogProp) + setShowAddUserDialog(true) + } + + const editUser = (user) => { + const userObj = { + ...user, + assignedRoles: [ + { + role: user.role, + active: true + } + ], + workspaceId: workspaceId + } + const 
dialogProp = { + type: 'EDIT', + cancelButtonName: 'Cancel', + confirmButtonName: 'Update Role', + data: userObj + } + setWorkspaceUserRoleDialogProps(dialogProp) + setShowWorkspaceUserRoleDialog(true) + } + + const unlinkUser = async () => { + const userList = usersSelected.map((user) => (user.name ? `${user.name} (${user.email})` : user.email)).join(', ') + + const confirmPayload = { + title: `Remove Users`, + description: `Remove the following users from the workspace?\n${userList}`, + confirmButtonName: 'Remove', + cancelButtonName: 'Cancel' + } + + const orgOwner = workspaceUsers.find( + (user) => usersSelected.some((selected) => selected.userId === user.id) && user.isOrgOwner === true + ) + if (orgOwner) { + enqueueSnackbar({ + message: `Organization owner cannot be removed from workspace.`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + action: (key) => ( + + ) + } + }) + return + } + + const isConfirmed = await confirm(confirmPayload) + + if (isConfirmed) { + try { + const deletePromises = usersSelected.map((user) => userApi.deleteWorkspaceUser(workspaceId, user.userId)) + await Promise.all(deletePromises) + + enqueueSnackbar({ + message: `${usersSelected.length} User(s) removed from workspace.`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + + // Check if current user is being removed + if (usersSelected.some((user) => user.userId === currentUser.id)) { + navigate('/', { replace: true }) + navigate(0) + return + } + + onConfirm() + } catch (error) { + enqueueSnackbar({ + message: `Failed to unlink users: ${ + typeof error.response.data === 'object' ? 
error.response.data.message : error.response.data + }`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + } + setUsersSelected([]) + } + } + + const onConfirm = () => { + setShowAddUserDialog(false) + setShowWorkspaceUserRoleDialog(false) + getAllUsersByWorkspaceIdApi.request(workspaceId) + } + + const onSearchChange = (event) => { + setSearch(event.target.value) + } + + function filterUsers(data) { + return ( + data.user.name?.toLowerCase().indexOf(search.toLowerCase()) > -1 || + data.user.email?.toLowerCase().indexOf(search.toLowerCase()) > -1 + ) + } + + useEffect(() => { + getWorkspaceByIdApi.request(workspaceId) + getAllUsersByWorkspaceIdApi.request(workspaceId) + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []) + + useEffect(() => { + if (getWorkspaceByIdApi.data) { + setWorkspace(getWorkspaceByIdApi.data) + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getWorkspaceByIdApi.data]) + + useEffect(() => { + if (getAllUsersByWorkspaceIdApi.data) { + const workSpaceUsers = getAllUsersByWorkspaceIdApi.data || [] + const orgAdmin = workSpaceUsers.find((item) => item.isOrgOwner) + if (orgAdmin) { + workSpaceUsers.splice(workSpaceUsers.indexOf(orgAdmin), 1) + workSpaceUsers.unshift(orgAdmin) + } + setWorkspaceUsers(workSpaceUsers) + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getAllUsersByWorkspaceIdApi.data]) + + useEffect(() => { + if (getAllUsersByWorkspaceIdApi.error) { + setError(getAllUsersByWorkspaceIdApi.error) + } + }, [getAllUsersByWorkspaceIdApi.error, setError]) + + useEffect(() => { + setLoading(getAllUsersByWorkspaceIdApi.loading) + }, [getAllUsersByWorkspaceIdApi.loading]) + + return ( + <> + + {error ? 
( + + ) : ( + + window.history.back()} + search={workspaceUsers.length > 0} + onSearchChange={onSearchChange} + searchPlaceholder={'Search Users'} + title={(workspace?.name || '') + ': Workspace Users'} + description={'Manage workspace users and permissions.'} + > + {workspaceUsers.length > 0 && ( + <> + } + > + Remove Users + + } + > + Add User + + + )} + + {!isLoading && workspaceUsers?.length <= 0 ? ( + + + empty_datasetSVG + +
    No Assigned Users Yet
    + } + onClick={addUser} + > + Add User + +
    + ) : ( + <> + + + + + + + + Email/Name + Role + Status + Last Login + + + + + {isLoading ? ( + <> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ) : ( + <> + {(workspaceUsers || []).filter(filterUsers).map((item, index) => ( + + + {item.isOrgOwner ? null : ( + handleUserSelect(event, item)} + inputProps={{ + 'aria-labelledby': item.userId + }} + /> + )} + + + {item.user.name && ( + <> + {item.user.name} +
    + + )} + {item.user.email} +
    + + {item.isOrgOwner ? ( + + ) : ( + item.role.name + )} + + + {item.isOrgOwner ? ( + <> + ) : ( + <> + {'ACTIVE' === item.status.toUpperCase() && ( + + )} + {'INVITED' === item.status.toUpperCase() && ( + + )} + {'INACTIVE' === item.status.toUpperCase() && ( + + )} + + )} + + + {!item.lastLogin + ? 'Never' + : moment(item.lastLogin).format('DD/MM/YYYY HH:mm')} + + + {!item.isOrgOwner && item.status.toUpperCase() === 'INVITED' && ( + onEditClick(item)} + > + + + )} + {!item.isOrgOwner && item.status.toUpperCase() === 'ACTIVE' && ( + onEditClick(item)} + > + + + )} + +
    + ))} + + )} +
    +
    +
    + + )} +
    + )} +
    + {showAddUserDialog && ( + setShowAddUserDialog(false)} + onConfirm={onConfirm} + > + )} + {showWorkspaceUserRoleDialog && ( + setShowWorkspaceUserRoleDialog(false)} + onConfirm={onConfirm} + /> + )} + + + ) +} + +export default WorkspaceDetails diff --git a/packages/ui/src/views/workspace/index.jsx b/packages/ui/src/views/workspace/index.jsx new file mode 100644 index 000000000..3c12764dd --- /dev/null +++ b/packages/ui/src/views/workspace/index.jsx @@ -0,0 +1,548 @@ +import moment from 'moment/moment' +import * as PropTypes from 'prop-types' +import { Fragment, useEffect, useState } from 'react' +import { useDispatch, useSelector } from 'react-redux' +import { useNavigate } from 'react-router-dom' + +// material-ui +import { + Box, + Button, + Chip, + Drawer, + IconButton, + Paper, + Skeleton, + Stack, + Table, + TableBody, + TableCell, + TableContainer, + TableHead, + TableRow, + Typography, + Dialog, + DialogContent, + CircularProgress +} from '@mui/material' +import { useTheme } from '@mui/material/styles' + +// project imports +import ErrorBoundary from '@/ErrorBoundary' +import ViewHeader from '@/layout/MainLayout/ViewHeader' +import { PermissionIconButton, StyledPermissionButton } from '@/ui-component/button/RBACButtons' +import MainCard from '@/ui-component/cards/MainCard' +import ConfirmDialog from '@/ui-component/dialog/ConfirmDialog' +import { StyledTableCell, StyledTableRow } from '@/ui-component/table/TableStyles' +import AddEditWorkspaceDialog from './AddEditWorkspaceDialog' + +// API +import userApi from '@/api/user' +import workspaceApi from '@/api/workspace' + +// Hooks +import useApi from '@/hooks/useApi' +import useConfirm from '@/hooks/useConfirm' + +// icons +import workspaces_emptySVG from '@/assets/images/workspaces_empty.svg' +import { IconEdit, IconEye, IconEyeOff, IconPlus, IconTrash, IconTrashOff, IconUsers, IconX } from '@tabler/icons-react' + +// Utils +import { truncateString } from '@/utils/genericHelper' +import useNotifier 
from '@/utils/useNotifier' + +// Store +import { store } from '@/store' +import { closeSnackbar as closeSnackbarAction, enqueueSnackbar as enqueueSnackbarAction } from '@/store/actions' +import { useError } from '@/store/context/ErrorContext' +import { workspaceSwitchSuccess } from '@/store/reducers/authSlice' +import { Link } from 'react-router-dom' + +function ShowWorkspaceRow(props) { + const customization = useSelector((state) => state.customization) + const currentUser = useSelector((state) => state.auth.user) + const [open, setOpen] = useState(false) + const [selectedWorkspaceId, setSelectedWorkspaceId] = useState('') + const [workspaceUsers, setWorkspaceUsers] = useState([]) + + const theme = useTheme() + + const getAllUsersByWorkspaceIdApi = useApi(userApi.getAllUsersByWorkspaceId) + + const handleViewWorkspaceUsers = (workspaceId) => { + setOpen(!open) + setSelectedWorkspaceId(workspaceId) + } + + useEffect(() => { + if (getAllUsersByWorkspaceIdApi.data) { + setWorkspaceUsers(getAllUsersByWorkspaceIdApi.data) + } + }, [getAllUsersByWorkspaceIdApi.data]) + + useEffect(() => { + if (open && selectedWorkspaceId) { + getAllUsersByWorkspaceIdApi.request(selectedWorkspaceId) + } else { + setOpen(false) + setSelectedWorkspaceId('') + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [open]) + + return ( + + + + {props.workspace.name} + {currentUser.activeWorkspaceId === props.workspace.id && ( + + )} + + + {truncateString(props.workspace?.description || '', 200)} + + + {props.workspace.userCount}{' '} + {props.workspace.userCount > 0 && ( + handleViewWorkspaceUsers(props.workspace.id)} + > + {props.workspace.userCount > 0 && open ? 
: } + + )} + + {moment(props.workspace.updatedDate).format('MMMM Do YYYY, hh:mm A')} + + {props.workspace.name !== 'Default Workspace' && ( + props.onEditClick(props.workspace)} + > + + + )} + + + + + + {props.workspace.name !== 'Default Workspace' && + (props.workspace.userCount > 1 || props.workspace.isOrgDefault === true ? ( + props.onDeleteClick(props.workspace)}> + + + ) : ( + props.onDeleteClick(props.workspace)} + > + + + ))} + + + setOpen(false)} sx={{ minWidth: 320 }}> + + + Users + + + + + + User + Role + + + + {workspaceUsers && + workspaceUsers.length > 0 && + workspaceUsers.map((item, index) => ( + + {item.user.name || item.user.email} + + {item.isOrgOwner ? ( + + ) : item.role.name === 'personal workspace' ? ( + + ) : ( + item.role.name + )} + + + ))} + +
    +
    +
    +
    +
    + ) +} + +ShowWorkspaceRow.propTypes = { + rowKey: PropTypes.any, + workspace: PropTypes.any, + onEditClick: PropTypes.func, + onDeleteClick: PropTypes.func, + onViewUsersClick: PropTypes.func, + open: PropTypes.bool, + theme: PropTypes.any +} + +// ==============================|| Workspaces ||============================== // + +const Workspaces = () => { + const navigate = useNavigate() + const theme = useTheme() + const { confirm } = useConfirm() + const currentUser = useSelector((state) => state.auth.user) + const customization = useSelector((state) => state.customization) + + useNotifier() + const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) + const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) + + const [search, setSearch] = useState('') + const dispatch = useDispatch() + const { error, setError } = useError() + const [isLoading, setLoading] = useState(true) + const [workspaces, setWorkspaces] = useState([]) + const [showWorkspaceDialog, setShowWorkspaceDialog] = useState(false) + const [workspaceDialogProps, setWorkspaceDialogProps] = useState({}) + const [isSwitching, setIsSwitching] = useState(false) + const [isDeleting, setIsDeleting] = useState(false) + + const getAllWorkspacesApi = useApi(workspaceApi.getAllWorkspacesByOrganizationId) + const switchWorkspaceApi = useApi(workspaceApi.switchWorkspace) + + const showWorkspaceUsers = (selectedWorkspace) => { + navigate(`/workspace-users/${selectedWorkspace.id}`) + } + + const onSearchChange = (event) => { + setSearch(event.target.value) + } + + const addNew = () => { + const dialogProp = { + type: 'ADD', + cancelButtonName: 'Cancel', + confirmButtonName: 'Add', + data: {} + } + setWorkspaceDialogProps(dialogProp) + setShowWorkspaceDialog(true) + } + + const edit = (workspace) => { + const dialogProp = { + type: 'EDIT', + cancelButtonName: 'Cancel', + confirmButtonName: 'Save', + data: workspace + } + setWorkspaceDialogProps(dialogProp) + 
setShowWorkspaceDialog(true) + } + + const deleteWorkspace = async (workspace) => { + const confirmPayload = { + title: `Delete Workspace ${workspace.name}`, + description: `This is irreversible and will remove all associated data inside the workspace. Are you sure you want to delete?`, + confirmButtonName: 'Delete', + cancelButtonName: 'Cancel' + } + const isConfirmed = await confirm(confirmPayload) + + if (isConfirmed) { + setIsDeleting(true) + try { + const deleteWorkspaceId = workspace.id + const deleteResp = await workspaceApi.deleteWorkspace(deleteWorkspaceId) + if (deleteResp.data) { + enqueueSnackbar({ + message: 'Workspace deleted', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + onConfirm(deleteWorkspaceId, true) + } + } catch (error) { + console.error('Failed to delete workspace:', error) + enqueueSnackbar({ + message: `Failed to delete workspace: ${ + typeof error.response.data === 'object' ? error.response.data.message : error.response.data + }`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + } finally { + setIsDeleting(false) + } + } + } + + const onConfirm = (specificWorkspaceId, isDeleteWorkspace) => { + setShowWorkspaceDialog(false) + getAllWorkspacesApi.request(currentUser.activeOrganizationId) + + const assignedWorkspaces = currentUser.assignedWorkspaces + if (assignedWorkspaces.length === 0 || workspaces.length === 0) { + return + } + + // if the deleted workspace is the active workspace, switch to first available workspace + if (isDeleteWorkspace && currentUser.activeWorkspaceId === specificWorkspaceId) { + setIsSwitching(true) + const workspaceId = workspaces[0].id + switchWorkspaceApi.request(workspaceId) + } else if (!isDeleteWorkspace && specificWorkspaceId) { + setIsSwitching(true) + switchWorkspaceApi.request(specificWorkspaceId) + } + } + + function filterWorkspaces(data) { + return 
data.name.toLowerCase().indexOf(search.toLowerCase()) > -1 + } + + useEffect(() => { + if (switchWorkspaceApi.data) { + setIsSwitching(false) + + // Create a promise that resolves when the state is updated + const waitForStateUpdate = new Promise((resolve) => { + const unsubscribe = store.subscribe(() => { + const state = store.getState() + if (state.auth.user.activeWorkspaceId === switchWorkspaceApi.data.activeWorkspaceId) { + unsubscribe() + resolve() + } + }) + }) + + // Dispatch and wait for state update before navigating + store.dispatch(workspaceSwitchSuccess(switchWorkspaceApi.data)) + waitForStateUpdate.then(() => { + navigate('/', { replace: true }) + navigate(0) + }) + } + }, [switchWorkspaceApi.data, navigate]) + + useEffect(() => { + if (getAllWorkspacesApi.data) { + setWorkspaces(getAllWorkspacesApi.data) + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getAllWorkspacesApi.data]) + + useEffect(() => { + setLoading(getAllWorkspacesApi.loading) + }, [getAllWorkspacesApi.loading]) + + useEffect(() => { + if (getAllWorkspacesApi.error) { + setError(getAllWorkspacesApi.error) + } + }, [getAllWorkspacesApi.error, setError]) + + useEffect(() => { + getAllWorkspacesApi.request(currentUser.activeOrganizationId) + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []) + + return ( + <> + + {error ? ( + + ) : ( + + + } + > + Add New + + + {!isLoading && workspaces.length <= 0 ? ( + + + workspaces_emptySVG + +
    No Workspaces Yet
    +
    + ) : ( + + + + + Name + Description + Users + Last Updated + + + + + {isLoading ? ( + <> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ) : ( + <> + {workspaces.filter(filterWorkspaces).map((ds, index) => ( + + ))} + + )} + +
    +
    + )} +
    + )} +
    + {showWorkspaceDialog && ( + setShowWorkspaceDialog(false)} + onConfirm={onConfirm} + > + )} + + + + + + + Switching workspace... + + + + + + + + + + Deleting workspace... + + + + + + ) +} + +export default Workspaces