Merge branch 'main' into feature/OAuth2-Tools

# Conflicts:
#	packages/components/src/utils.ts

Commit: a8ba0956fa
@@ -20,6 +20,9 @@
"start-worker": "run-script-os",
"start-worker:windows": "cd packages/server/bin && run worker",
"start-worker:default": "cd packages/server/bin && ./run worker",
"user": "run-script-os",
"user:windows": "cd packages/server/bin && run user",
"user:default": "cd packages/server/bin && ./run user",
"test": "turbo run test",
"clean": "pnpm --filter \"./packages/**\" clean",
"nuke": "pnpm --filter \"./packages/**\" nuke && rimraf node_modules .turbo",
@@ -1401,10 +1401,19 @@ class Agent_Agentflow implements INode {
return { response, usedTools, sourceDocuments, artifacts, totalTokens, isWaitingForHumanInput: true }
}

let toolIds: ICommonObject | undefined
if (options.analyticHandlers) {
toolIds = await options.analyticHandlers.onToolStart(toolCall.name, toolCall.args, options.parentTraceIds)
}

try {
//@ts-ignore
let toolOutput = await selectedTool.call(toolCall.args, { signal: abortController?.signal }, undefined, flowConfig)

if (options.analyticHandlers && toolIds) {
await options.analyticHandlers.onToolEnd(toolIds, toolOutput)
}

// Extract source documents if present
if (typeof toolOutput === 'string' && toolOutput.includes(SOURCE_DOCUMENTS_PREFIX)) {
const [output, docs] = toolOutput.split(SOURCE_DOCUMENTS_PREFIX)

@@ -1459,6 +1468,10 @@ class Agent_Agentflow implements INode {
toolOutput
})
} catch (e) {
if (options.analyticHandlers && toolIds) {
await options.analyticHandlers.onToolEnd(toolIds, e)
}

console.error('Error invoking tool:', e)
usedTools.push({
tool: selectedTool.name,

@@ -1650,10 +1663,19 @@ class Agent_Agentflow implements INode {
toolsInstance = toolsInstance.filter((tool) => tool.name !== toolCall.name)
}
if (humanInput.type === 'proceed') {
let toolIds: ICommonObject | undefined
if (options.analyticHandlers) {
toolIds = await options.analyticHandlers.onToolStart(toolCall.name, toolCall.args, options.parentTraceIds)
}

try {
//@ts-ignore
let toolOutput = await selectedTool.call(toolCall.args, { signal: abortController?.signal }, undefined, flowConfig)

if (options.analyticHandlers && toolIds) {
await options.analyticHandlers.onToolEnd(toolIds, toolOutput)
}

// Extract source documents if present
if (typeof toolOutput === 'string' && toolOutput.includes(SOURCE_DOCUMENTS_PREFIX)) {
const [output, docs] = toolOutput.split(SOURCE_DOCUMENTS_PREFIX)

@@ -1708,6 +1730,10 @@ class Agent_Agentflow implements INode {
toolOutput
})
} catch (e) {
if (options.analyticHandlers && toolIds) {
await options.analyticHandlers.onToolEnd(toolIds, e)
}

console.error('Error invoking tool:', e)
usedTools.push({
tool: selectedTool.name,
@@ -1,8 +1,10 @@
import axios, { AxiosRequestConfig } from 'axios'
import { omit } from 'lodash'
import { Document } from '@langchain/core/documents'
import { TextSplitter } from 'langchain/text_splitter'
import axios, { AxiosRequestConfig } from 'axios'
import * as https from 'https'
import { BaseDocumentLoader } from 'langchain/document_loaders/base'
import { TextSplitter } from 'langchain/text_splitter'
import { omit } from 'lodash'
import { getFileFromStorage } from '../../../src'
import { ICommonObject, IDocument, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
import { handleEscapeCharacters } from '../../../src/utils'

@@ -21,7 +23,7 @@ class API_DocumentLoaders implements INode {
constructor() {
this.label = 'API Loader'
this.name = 'apiLoader'
this.version = 2.0
this.version = 2.1
this.type = 'Document'
this.icon = 'api.svg'
this.category = 'Document Loaders'

@@ -61,6 +63,15 @@ class API_DocumentLoaders implements INode {
additionalParams: true,
optional: true
},
{
label: 'SSL Certificate',
description: 'Please upload a SSL certificate file in either .pem or .crt',
name: 'caFile',
type: 'file',
fileType: '.pem, .crt',
additionalParams: true,
optional: true
},
{
label: 'Body',
name: 'body',

@@ -105,8 +116,10 @@ class API_DocumentLoaders implements INode {
}
]
}
async init(nodeData: INodeData): Promise<any> {

async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
const headers = nodeData.inputs?.headers as string
const caFileBase64 = nodeData.inputs?.caFile as string
const url = nodeData.inputs?.url as string
const body = nodeData.inputs?.body as string
const method = nodeData.inputs?.method as string

@@ -120,22 +133,37 @@ class API_DocumentLoaders implements INode {
omitMetadataKeys = _omitMetadataKeys.split(',').map((key) => key.trim())
}

const options: ApiLoaderParams = {
const apiLoaderParam: ApiLoaderParams = {
url,
method
}

if (headers) {
const parsedHeaders = typeof headers === 'object' ? headers : JSON.parse(headers)
options.headers = parsedHeaders
apiLoaderParam.headers = parsedHeaders
}

if (caFileBase64.startsWith('FILE-STORAGE::')) {
let file = caFileBase64.replace('FILE-STORAGE::', '')
file = file.replace('[', '')
file = file.replace(']', '')
const orgId = options.orgId
const chatflowid = options.chatflowid
const fileData = await getFileFromStorage(file, orgId, chatflowid)
apiLoaderParam.ca = fileData.toString()
} else {
const splitDataURI = caFileBase64.split(',')
splitDataURI.pop()
const bf = Buffer.from(splitDataURI.pop() || '', 'base64')
apiLoaderParam.ca = bf.toString('utf-8')
}

if (body) {
const parsedBody = typeof body === 'object' ? body : JSON.parse(body)
options.body = parsedBody
apiLoaderParam.body = parsedBody
}

const loader = new ApiLoader(options)
const loader = new ApiLoader(apiLoaderParam)

let docs: IDocument[] = []

@@ -195,6 +223,7 @@ interface ApiLoaderParams {
method: string
headers?: ICommonObject
body?: ICommonObject
ca?: string
}

class ApiLoader extends BaseDocumentLoader {

@@ -206,28 +235,36 @@ class ApiLoader extends BaseDocumentLoader {

public readonly method: string

constructor({ url, headers, body, method }: ApiLoaderParams) {
public readonly ca?: string

constructor({ url, headers, body, method, ca }: ApiLoaderParams) {
super()
this.url = url
this.headers = headers
this.body = body
this.method = method
this.ca = ca
}

public async load(): Promise<IDocument[]> {
if (this.method === 'POST') {
return this.executePostRequest(this.url, this.headers, this.body)
return this.executePostRequest(this.url, this.headers, this.body, this.ca)
} else {
return this.executeGetRequest(this.url, this.headers)
return this.executeGetRequest(this.url, this.headers, this.ca)
}
}

protected async executeGetRequest(url: string, headers?: ICommonObject): Promise<IDocument[]> {
protected async executeGetRequest(url: string, headers?: ICommonObject, ca?: string): Promise<IDocument[]> {
try {
const config: AxiosRequestConfig = {}
if (headers) {
config.headers = headers
}
if (ca) {
config.httpsAgent = new https.Agent({
ca: ca
})
}
const response = await axios.get(url, config)
const responseJsonString = JSON.stringify(response.data, null, 2)
const doc = new Document({

@@ -242,12 +279,17 @@ class ApiLoader extends BaseDocumentLoader {
}
}

protected async executePostRequest(url: string, headers?: ICommonObject, body?: ICommonObject): Promise<IDocument[]> {
protected async executePostRequest(url: string, headers?: ICommonObject, body?: ICommonObject, ca?: string): Promise<IDocument[]> {
try {
const config: AxiosRequestConfig = {}
if (headers) {
config.headers = headers
}
if (ca) {
config.httpsAgent = new https.Agent({
ca: ca
})
}
const response = await axios.post(url, body ?? {}, config)
const responseJsonString = JSON.stringify(response.data, null, 2)
const doc = new Document({
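For context on the CA handling added above, here is a minimal sketch (not part of the commit) of how a PEM certificate string can be attached to an axios request through Node's https.Agent; the certificate string and URL are placeholders.

```ts
import axios, { AxiosRequestConfig } from 'axios'
import * as https from 'https'

// Placeholder PEM string; in the loader above it comes from file storage or a data URI
const ca = '-----BEGIN CERTIFICATE-----\n...\n-----END CERTIFICATE-----'

const config: AxiosRequestConfig = {
    // axios hands this agent to Node's https module, so TLS verification uses the custom CA
    httpsAgent: new https.Agent({ ca })
}

// GET against a placeholder endpoint secured by the custom CA
axios.get('https://internal.example.com/api/data', config).then((res) => console.log(res.status))
```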
@@ -67,6 +67,29 @@ interface ExtractResponse {
data?: Record<string, any>
}

interface SearchResult {
url: string
title: string
description: string
}

interface SearchResponse {
success: boolean
data?: SearchResult[]
warning?: string
}

interface SearchRequest {
query: string
limit?: number
tbs?: string
lang?: string
country?: string
location?: string
timeout?: number
ignoreInvalidURLs?: boolean
}

interface Params {
[key: string]: any
extractorOptions?: {

@@ -161,7 +184,11 @@ class FirecrawlApp {
}

try {
const response: AxiosResponse = await this.postRequest(this.apiUrl + '/v1/scrape', validParams, headers)
const parameters = {
...validParams,
integration: 'flowise'
}
const response: AxiosResponse = await this.postRequest(this.apiUrl + '/v1/scrape', parameters, headers)
if (response.status === 200) {
const responseData = response.data
if (responseData.success) {

@@ -259,7 +286,11 @@ class FirecrawlApp {
}

try {
const response: AxiosResponse = await this.postRequest(this.apiUrl + '/v1/crawl', validParams, headers)
const parameters = {
...validParams,
integration: 'flowise'
}
const response: AxiosResponse = await this.postRequest(this.apiUrl + '/v1/crawl', parameters, headers)
if (response.status === 200) {
const crawlResponse = response.data as CrawlResponse
if (!crawlResponse.success) {

@@ -367,7 +398,11 @@ class FirecrawlApp {
}

try {
const response: AxiosResponse = await this.postRequest(this.apiUrl + '/v1/extract', validParams, headers)
const parameters = {
...validParams,
integration: 'flowise'
}
const response: AxiosResponse = await this.postRequest(this.apiUrl + '/v1/extract', parameters, headers)
if (response.status === 200) {
const extractResponse = response.data as ExtractResponse
if (waitUntilDone) {

@@ -384,18 +419,55 @@ class FirecrawlApp {
return { success: false, id: '', url: '' }
}

async search(request: SearchRequest): Promise<SearchResponse> {
const headers = this.prepareHeaders()

// Create a clean payload with only valid parameters
const validParams: any = {
query: request.query
}

// Add optional parameters if they exist and are not empty
const validSearchParams = ['limit', 'tbs', 'lang', 'country', 'location', 'timeout', 'ignoreInvalidURLs'] as const

validSearchParams.forEach((param) => {
if (request[param] !== undefined && request[param] !== null) {
validParams[param] = request[param]
}
})

try {
const parameters = {
...validParams,
integration: 'flowise'
}
const response: AxiosResponse = await this.postRequest(this.apiUrl + '/v1/search', parameters, headers)
if (response.status === 200) {
const searchResponse = response.data as SearchResponse
if (!searchResponse.success) {
throw new Error(`Search request failed: ${searchResponse.warning || 'Unknown error'}`)
}
return searchResponse
} else {
this.handleError(response, 'perform search')
}
} catch (error: any) {
throw new Error(error.message)
}
return { success: false }
}

private prepareHeaders(idempotencyKey?: string): AxiosRequestHeaders {
return {
'Content-Type': 'application/json',
Authorization: `Bearer ${this.apiKey}`,
'X-Origin': 'flowise',
'X-Origin-Type': 'integration',
...(idempotencyKey ? { 'x-idempotency-key': idempotencyKey } : {})
} as AxiosRequestHeaders & { 'X-Origin': string; 'X-Origin-Type': string; 'x-idempotency-key'?: string }
} as AxiosRequestHeaders & { 'x-idempotency-key'?: string }
}

private postRequest(url: string, data: Params, headers: AxiosRequestHeaders): Promise<AxiosResponse> {
return axios.post(url, data, { headers })
private async postRequest(url: string, data: Params, headers: AxiosRequestHeaders): Promise<AxiosResponse> {
const result = await axios.post(url, data, { headers })
return result
}

private getRequest(url: string, headers: AxiosRequestHeaders): Promise<AxiosResponse> {
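A short usage sketch of the new search method, based only on the signature shown above; the API key and query values are placeholders.

```ts
// Sketch only: assumes the FirecrawlApp class defined above and a valid key
const app = new FirecrawlApp({ apiKey: 'FIRECRAWL_API_KEY', apiUrl: 'https://api.firecrawl.dev' })

async function searchExample() {
    // search() posts { query, ...options, integration: 'flowise' } to /v1/search
    const response = await app.search({ query: 'flowise document loaders', limit: 5, lang: 'en' })
    for (const result of response.data ?? []) {
        console.log(result.title, result.url)
    }
}
```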
@@ -468,29 +540,32 @@ class FirecrawlApp {

// FireCrawl Loader
interface FirecrawlLoaderParameters {
url: string
url?: string
query?: string
apiKey?: string
apiUrl?: string
mode?: 'crawl' | 'scrape' | 'extract'
mode?: 'crawl' | 'scrape' | 'extract' | 'search'
params?: Record<string, unknown>
}

export class FireCrawlLoader extends BaseDocumentLoader {
private apiKey: string
private apiUrl: string
private url: string
private mode: 'crawl' | 'scrape' | 'extract'
private url?: string
private query?: string
private mode: 'crawl' | 'scrape' | 'extract' | 'search'
private params?: Record<string, unknown>

constructor(loaderParams: FirecrawlLoaderParameters) {
super()
const { apiKey, apiUrl, url, mode = 'crawl', params } = loaderParams
const { apiKey, apiUrl, url, query, mode = 'crawl', params } = loaderParams
if (!apiKey) {
throw new Error('Firecrawl API key not set. You can set it as FIRECRAWL_API_KEY in your .env file, or pass it to Firecrawl.')
}

this.apiKey = apiKey
this.url = url
this.query = query
this.mode = mode
this.params = params
this.apiUrl = apiUrl || 'https://api.firecrawl.dev'

@@ -500,13 +575,37 @@ export class FireCrawlLoader extends BaseDocumentLoader {
const app = new FirecrawlApp({ apiKey: this.apiKey, apiUrl: this.apiUrl })
let firecrawlDocs: FirecrawlDocument[]

if (this.mode === 'scrape') {
if (this.mode === 'search') {
if (!this.query) {
throw new Error('Firecrawl: Query is required for search mode')
}
const response = await app.search({ query: this.query, ...this.params })
if (!response.success) {
throw new Error(`Firecrawl: Failed to search. Warning: ${response.warning}`)
}

// Convert search results to FirecrawlDocument format
firecrawlDocs = (response.data || []).map((result) => ({
markdown: result.description,
metadata: {
title: result.title,
sourceURL: result.url,
description: result.description
}
}))
} else if (this.mode === 'scrape') {
if (!this.url) {
throw new Error('Firecrawl: URL is required for scrape mode')
}
const response = await app.scrapeUrl(this.url, this.params)
if (!response.success) {
throw new Error(`Firecrawl: Failed to scrape URL. Error: ${response.error}`)
}
firecrawlDocs = [response.data as FirecrawlDocument]
} else if (this.mode === 'crawl') {
if (!this.url) {
throw new Error('Firecrawl: URL is required for crawl mode')
}
const response = await app.crawlUrl(this.url, this.params)
if ('status' in response) {
if (response.status === 'failed') {
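And a sketch of driving the loader itself in the new search mode, using the FirecrawlLoaderParameters shape above (query instead of url); all values are placeholders.

```ts
// Sketch only: FireCrawlLoader as defined above, with a placeholder key and query
async function loadSearchResults() {
    const loader = new FireCrawlLoader({
        apiKey: 'FIRECRAWL_API_KEY', // placeholder
        mode: 'search',
        query: 'open source LLM orchestration',
        params: { limit: 5, country: 'us' }
    })
    // Each search result becomes a document whose content is the result description
    const docs = await loader.load()
    console.log(docs.length)
}
```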
@@ -520,6 +619,9 @@ export class FireCrawlLoader extends BaseDocumentLoader {
firecrawlDocs = [response.data as FirecrawlDocument]
}
} else if (this.mode === 'extract') {
if (!this.url) {
throw new Error('Firecrawl: URL is required for extract mode')
}
this.params!.urls = [this.url]
const response = await app.extract(this.params as any as ExtractRequest)
if (!response.success) {

@@ -557,7 +659,7 @@ export class FireCrawlLoader extends BaseDocumentLoader {
}
return []
} else {
throw new Error(`Unrecognized mode '${this.mode}'. Expected one of 'crawl', 'scrape', 'extract'.`)
throw new Error(`Unrecognized mode '${this.mode}'. Expected one of 'crawl', 'scrape', 'extract', 'search'.`)
}

// Convert Firecrawl documents to LangChain documents

@@ -602,7 +704,7 @@ class FireCrawl_DocumentLoaders implements INode {
this.name = 'fireCrawl'
this.type = 'Document'
this.icon = 'firecrawl.png'
this.version = 3.0
this.version = 4.0
this.category = 'Document Loaders'
this.description = 'Load data from URL using FireCrawl'
this.baseClasses = [this.type]

@@ -620,14 +722,7 @@ class FireCrawl_DocumentLoaders implements INode {
optional: true
},
{
label: 'URLs',
name: 'url',
type: 'string',
description: 'URL to be crawled/scraped/extracted',
placeholder: 'https://docs.flowiseai.com'
},
{
label: 'Crawler type',
label: 'Type',
type: 'options',
name: 'crawlerType',
options: [

@@ -645,89 +740,179 @@ class FireCrawl_DocumentLoaders implements INode {
label: 'Extract',
name: 'extract',
description: 'Extract data from a URL'
},
{
label: 'Search',
name: 'search',
description: 'Search the web using FireCrawl'
}
],
default: 'crawl'
},
{
label: 'URLs',
name: 'url',
type: 'string',
description: 'URL to be crawled/scraped/extracted',
placeholder: 'https://docs.flowiseai.com',
optional: true,
show: {
crawlerType: ['crawl', 'scrape', 'extract']
}
},
{
// includeTags
label: '[Scrape] Include Tags',
label: 'Include Tags',
name: 'includeTags',
type: 'string',
description: 'Tags to include in the output. Use comma to separate multiple tags.',
optional: true,
additionalParams: true
additionalParams: true,
show: {
crawlerType: ['scrape']
}
},
{
// excludeTags
label: '[Scrape] Exclude Tags',
label: 'Exclude Tags',
name: 'excludeTags',
type: 'string',
description: 'Tags to exclude from the output. Use comma to separate multiple tags.',
optional: true,
additionalParams: true
additionalParams: true,
show: {
crawlerType: ['scrape']
}
},
{
// onlyMainContent
label: '[Scrape] Only Main Content',
label: 'Only Main Content',
name: 'onlyMainContent',
type: 'boolean',
description: 'Extract only the main content of the page',
optional: true,
additionalParams: true
additionalParams: true,
show: {
crawlerType: ['scrape']
}
},
{
// limit
label: '[Crawl] Limit',
label: 'Limit',
name: 'limit',
type: 'string',
description: 'Maximum number of pages to crawl',
optional: true,
additionalParams: true,
default: '10000'
default: '10000',
show: {
crawlerType: ['crawl']
}
},
{
label: '[Crawl] Include Paths',
label: 'Include Paths',
name: 'includePaths',
type: 'string',
description:
'URL pathname regex patterns that include matching URLs in the crawl. Only the paths that match the specified patterns will be included in the response.',
placeholder: `blog/.*, news/.*`,
optional: true,
additionalParams: true
additionalParams: true,
show: {
crawlerType: ['crawl']
}
},
{
label: '[Crawl] Exclude Paths',
label: 'Exclude Paths',
name: 'excludePaths',
type: 'string',
description: 'URL pathname regex patterns that exclude matching URLs from the crawl.',
placeholder: `blog/.*, news/.*`,
optional: true,
additionalParams: true
additionalParams: true,
show: {
crawlerType: ['crawl']
}
},
{
label: '[Extract] Schema',
label: 'Schema',
name: 'extractSchema',
type: 'json',
description: 'JSON schema for data extraction',
optional: true,
additionalParams: true
additionalParams: true,
show: {
crawlerType: ['extract']
}
},
{
label: '[Extract] Prompt',
label: 'Prompt',
name: 'extractPrompt',
type: 'string',
description: 'Prompt for data extraction',
optional: true,
additionalParams: true
additionalParams: true,
show: {
crawlerType: ['extract']
}
},
{
label: '[Extract] Job ID',
name: 'extractJobId',
label: 'Query',
name: 'searchQuery',
type: 'string',
description: 'ID of the extract job',
description: 'Search query to find relevant content',
optional: true,
additionalParams: true
show: {
crawlerType: ['search']
}
},
{
label: 'Limit',
name: 'searchLimit',
type: 'string',
description: 'Maximum number of results to return',
optional: true,
additionalParams: true,
default: '5',
show: {
crawlerType: ['search']
}
},
{
label: 'Language',
name: 'searchLang',
type: 'string',
description: 'Language code for search results (e.g., en, es, fr)',
optional: true,
additionalParams: true,
default: 'en',
show: {
crawlerType: ['search']
}
},
{
label: 'Country',
name: 'searchCountry',
type: 'string',
description: 'Country code for search results (e.g., us, uk, ca)',
optional: true,
additionalParams: true,
default: 'us',
show: {
crawlerType: ['search']
}
},
{
label: 'Timeout',
name: 'searchTimeout',
type: 'number',
description: 'Timeout in milliseconds for search operation',
optional: true,
additionalParams: true,
default: 60000,
show: {
crawlerType: ['search']
}
}
]
this.outputs = [

@@ -758,6 +943,11 @@ class FireCrawl_DocumentLoaders implements INode {
const firecrawlApiUrl = getCredentialParam('firecrawlApiUrl', credentialData, nodeData, 'https://api.firecrawl.dev')
const output = nodeData.outputs?.output as string

// Validate URL only for non-search methods
if (crawlerType !== 'search' && !url) {
throw new Error('Firecrawl: URL is required for ' + crawlerType + ' mode')
}

const includePaths = nodeData.inputs?.includePaths ? (nodeData.inputs.includePaths.split(',') as string[]) : undefined
const excludePaths = nodeData.inputs?.excludePaths ? (nodeData.inputs.excludePaths.split(',') as string[]) : undefined

@@ -767,9 +957,16 @@ class FireCrawl_DocumentLoaders implements INode {
const extractSchema = nodeData.inputs?.extractSchema
const extractPrompt = nodeData.inputs?.extractPrompt as string

const searchQuery = nodeData.inputs?.searchQuery as string
const searchLimit = nodeData.inputs?.searchLimit as string
const searchLang = nodeData.inputs?.searchLang as string
const searchCountry = nodeData.inputs?.searchCountry as string
const searchTimeout = nodeData.inputs?.searchTimeout as number

const input: FirecrawlLoaderParameters = {
url,
mode: crawlerType as 'crawl' | 'scrape' | 'extract',
query: searchQuery,
mode: crawlerType as 'crawl' | 'scrape' | 'extract' | 'search',
apiKey: firecrawlApiToken,
apiUrl: firecrawlApiUrl,
params: {

@@ -785,6 +982,19 @@ class FireCrawl_DocumentLoaders implements INode {
}
}

// Add search-specific parameters only when in search mode
if (crawlerType === 'search') {
if (!searchQuery) {
throw new Error('Firecrawl: Search query is required for search mode')
}
input.params = {
limit: searchLimit ? parseInt(searchLimit, 10) : 5,
lang: searchLang,
country: searchCountry,
timeout: searchTimeout
}
}

if (onlyMainContent === true) {
const scrapeOptions = input.params?.scrapeOptions as any
input.params!.scrapeOptions = {
@@ -27,6 +27,16 @@ For example, you have a variable called "var1":
}
}
\`\`\`

For example, when using SSE, you can use the variable "var1" in the headers:
\`\`\`json
{
"url": "https://api.example.com/endpoint/sse",
"headers": {
"Authorization": "Bearer {{$vars.var1}}"
}
}
\`\`\`
`

class Custom_MCP implements INode {
@@ -53,10 +53,29 @@ export class MCPToolkit extends BaseToolkit {

const baseUrl = new URL(this.serverParams.url)
try {
transport = new StreamableHTTPClientTransport(baseUrl)
if (this.serverParams.headers) {
transport = new StreamableHTTPClientTransport(baseUrl, {
requestInit: {
headers: this.serverParams.headers
}
})
} else {
transport = new StreamableHTTPClientTransport(baseUrl)
}
await client.connect(transport)
} catch (error) {
transport = new SSEClientTransport(baseUrl)
if (this.serverParams.headers) {
transport = new SSEClientTransport(baseUrl, {
requestInit: {
headers: this.serverParams.headers
},
eventSourceInit: {
fetch: (url, init) => fetch(url, { ...init, headers: this.serverParams.headers })
}
})
} else {
transport = new SSEClientTransport(baseUrl)
}
await client.connect(transport)
}
}
@@ -4,7 +4,7 @@ import { WeaviateLibArgs, WeaviateStore } from '@langchain/weaviate'
import { Document } from '@langchain/core/documents'
import { Embeddings } from '@langchain/core/embeddings'
import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams, IndexingResult } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { getBaseClasses, getCredentialData, getCredentialParam, normalizeKeysRecursively } from '../../../src/utils'
import { addMMRInputParams, resolveVectorStoreOrRetriever } from '../VectorStoreUtils'
import { index } from '../../../src/indexing'
import { VectorStore } from '@langchain/core/vectorstores'

@@ -175,7 +175,11 @@ class Weaviate_VectorStores implements INode {
const finalDocs = []
for (let i = 0; i < flattenDocs.length; i += 1) {
if (flattenDocs[i] && flattenDocs[i].pageContent) {
finalDocs.push(new Document(flattenDocs[i]))
const doc = { ...flattenDocs[i] }
if (doc.metadata) {
doc.metadata = normalizeKeysRecursively(doc.metadata)
}
finalDocs.push(new Document(doc))
}
}
@@ -1216,6 +1216,35 @@ export const handleDocumentLoaderDocuments = async (loader: DocumentLoader, text
return docs
}

/**
* Normalize special characters in key to be used in vector store
* @param str - Key to normalize
* @returns Normalized key
*/
export const normalizeSpecialChars = (str: string) => {
return str.replace(/[^a-zA-Z0-9_]/g, '_')
}

/**
* recursively normalize object keys
* @param data - Object to normalize
* @returns Normalized object
*/
export const normalizeKeysRecursively = (data: any): any => {
if (Array.isArray(data)) {
return data.map(normalizeKeysRecursively)
}

if (data !== null && typeof data === 'object') {
return Object.entries(data).reduce((acc, [key, value]) => {
const newKey = normalizeSpecialChars(key)
acc[newKey] = normalizeKeysRecursively(value)
return acc
}, {} as Record<string, any>)
}
return data
}

/**
* Check if OAuth2 token is expired and refresh if needed
* @param {string} credentialId
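A quick illustrative sketch (not part of the commit) of what these helpers do to document metadata before it is upserted into Weaviate; the input object is made up.

```ts
// Keys containing characters outside [a-zA-Z0-9_] are rewritten to underscores, recursively
const metadata = {
    'source-url': 'https://example.com',
    'page number': 3,
    nested: { 'content-type': 'text/html' }
}

console.log(normalizeKeysRecursively(metadata))
// { source_url: 'https://example.com', page_number: 3, nested: { content_type: 'text/html' } }
```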
@@ -1,6 +1,6 @@
import { Command, Flags } from '@oclif/core'
import path from 'path'
import dotenv from 'dotenv'
import path from 'path'
import logger from '../utils/logger'

dotenv.config({ path: path.join(__dirname, '..', '..', '.env'), override: true })

@@ -120,7 +120,7 @@ export abstract class BaseCommand extends Command {
logger.error('unhandledRejection: ', err)
})

const { flags } = await this.parse(BaseCommand)
const { flags } = await this.parse(this.constructor as any)
if (flags.PORT) process.env.PORT = flags.PORT
if (flags.CORS_ORIGINS) process.env.CORS_ORIGINS = flags.CORS_ORIGINS
if (flags.IFRAME_ORIGINS) process.env.IFRAME_ORIGINS = flags.IFRAME_ORIGINS
@@ -0,0 +1,80 @@
import { Args } from '@oclif/core'
import { QueryRunner } from 'typeorm'
import * as DataSource from '../DataSource'
import { User } from '../enterprise/database/entities/user.entity'
import { getHash } from '../enterprise/utils/encryption.util'
import { isInvalidPassword } from '../enterprise/utils/validation.util'
import logger from '../utils/logger'
import { BaseCommand } from './base'

export default class user extends BaseCommand {
static args = {
email: Args.string({
description: 'Email address to search for in the user database'
}),
password: Args.string({
description: 'New password for that user'
})
}

async run(): Promise<void> {
const { args } = await this.parse(user)

let queryRunner: QueryRunner | undefined
try {
logger.info('Initializing DataSource')
const dataSource = await DataSource.getDataSource()
await dataSource.initialize()

queryRunner = dataSource.createQueryRunner()
await queryRunner.connect()

if (args.email && args.password) {
logger.info('Running resetPassword')
await this.resetPassword(queryRunner, args.email, args.password)
} else {
logger.info('Running listUserEmails')
await this.listUserEmails(queryRunner)
}
} catch (error) {
logger.error(error)
} finally {
if (queryRunner && !queryRunner.isReleased) await queryRunner.release()
await this.gracefullyExit()
}
}

async listUserEmails(queryRunner: QueryRunner) {
logger.info('Listing all user emails')
const users = await queryRunner.manager.find(User, {
select: ['email']
})

const emails = users.map((user) => user.email)
logger.info(`Email addresses: ${emails.join(', ')}`)
logger.info(`Email count: ${emails.length}`)
logger.info('To reset user password, run the following command: pnpm user --email "myEmail" --password "myPassword"')
}

async resetPassword(queryRunner: QueryRunner, email: string, password: string) {
logger.info(`Finding user by email: ${email}`)
const user = await queryRunner.manager.findOne(User, {
where: { email }
})
if (!user) throw new Error(`User not found with email: ${email}`)

if (isInvalidPassword(password)) {
const errors = []
if (!/(?=.*[a-z])/.test(password)) errors.push('at least one lowercase letter')
if (!/(?=.*[A-Z])/.test(password)) errors.push('at least one uppercase letter')
if (!/(?=.*\d)/.test(password)) errors.push('at least one number')
if (!/(?=.*[^a-zA-Z0-9])/.test(password)) errors.push('at least one special character')
if (password.length < 8) errors.push('minimum length of 8 characters')
throw new Error(`Invalid password: Must contain ${errors.join(', ')}`)
}

user.credential = getHash(password)
await queryRunner.manager.save(user)
logger.info(`Password reset for user: ${email}`)
}
}
@@ -7,6 +7,6 @@ export class AddSeqNoToDatasetRow1733752119696 implements MigrationInterface {
}

public async down(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query(`ALTER TABLE "dataset_row" DROP COLUMN "sequence_no";`)
await queryRunner.query(`ALTER TABLE \`dataset_row\` DROP COLUMN \`sequence_no\``)
}
}

@@ -7,6 +7,6 @@ export class AddErrorToEvaluationRun1744964560174 implements MigrationInterface
}

public async down(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query(`ALTER TABLE "evaluation_run" DROP COLUMN "errors";`)
await queryRunner.query(`ALTER TABLE \`evaluation_run\` DROP COLUMN \`errors\`;`)
}
}

@@ -9,8 +9,8 @@ export class AddSSOColumns1730519457880 implements MigrationInterface {
}

public async down(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query(`ALTER TABLE "organization" DROP COLUMN "sso_config";`)
await queryRunner.query(`ALTER TABLE "user" DROP COLUMN "user_type";`)
await queryRunner.query(`ALTER TABLE "login_activity" DROP COLUMN "login_mode";`)
await queryRunner.query(`ALTER TABLE \`organization\` DROP COLUMN \`sso_config\`;`)
await queryRunner.query(`ALTER TABLE \`user\` DROP COLUMN \`user_type\`;`)
await queryRunner.query(`ALTER TABLE \`login_activity\` DROP COLUMN \`login_mode\`;`)
}
}
@@ -1503,7 +1503,14 @@ export const executeAgentFlow = async ({

try {
if (chatflow.analytic) {
analyticHandlers = AnalyticHandler.getInstance({ inputs: {} } as any, {
// Override config analytics
let analyticInputs: ICommonObject = {}
if (overrideConfig?.analytics && Object.keys(overrideConfig.analytics).length > 0) {
analyticInputs = {
...overrideConfig.analytics
}
}
analyticHandlers = AnalyticHandler.getInstance({ inputs: { analytics: analyticInputs } } as any, {
orgId,
workspaceId,
appDataSource,
@@ -68,6 +68,7 @@
"rehype-raw": "^7.0.0",
"remark-gfm": "^3.0.1",
"remark-math": "^5.1.1",
"showdown": "^2.1.0",
"tippy.js": "^6.3.7",
"uuid": "^9.0.1",
"yup": "^0.32.9"
@@ -15,7 +15,7 @@ import { Dropdown } from '@/ui-component/dropdown/Dropdown'
import { useTheme } from '@mui/material/styles'
import assistantsApi from '@/api/assistants'
import { baseURL } from '@/store/constant'
import { initNode } from '@/utils/genericHelper'
import { initNode, showHideInputParams } from '@/utils/genericHelper'
import DocStoreInputHandler from '@/views/docstore/DocStoreInputHandler'
import useApi from '@/hooks/useApi'

@@ -55,6 +55,15 @@ const AgentflowGeneratorDialog = ({ show, dialogProps, onCancel, onConfirm }) =>
const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args))
const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args))

const handleChatModelDataChange = ({ inputParam, newValue }) => {
setSelectedChatModel((prevData) => {
const updatedData = { ...prevData }
updatedData.inputs[inputParam.name] = newValue
updatedData.inputParams = showHideInputParams(updatedData)
return updatedData
})
}

useEffect(() => {
if (getChatModelsApi.data) {
setChatModelsComponents(getChatModelsApi.data)

@@ -303,10 +312,15 @@ const AgentflowGeneratorDialog = ({ show, dialogProps, onCancel, onConfirm }) =>
borderRadius: 2
}}
>
{(selectedChatModel.inputParams ?? [])
.filter((inputParam) => !inputParam.hidden)
{showHideInputParams(selectedChatModel)
.filter((inputParam) => !inputParam.hidden && inputParam.display !== false)
.map((inputParam, index) => (
<DocStoreInputHandler key={index} inputParam={inputParam} data={selectedChatModel} />
<DocStoreInputHandler
key={index}
inputParam={inputParam}
data={selectedChatModel}
onNodeDataChange={handleChatModelDataChange}
/>
))}
</Box>
)}
@@ -81,7 +81,11 @@ const NodeInfoDialog = ({ show, dialogProps, onCancel }) => {
height: 50,
marginRight: 10,
borderRadius: '50%',
backgroundColor: 'white'
backgroundColor: 'white',
flexShrink: 0,
display: 'flex',
alignItems: 'center',
justifyContent: 'center'
}}
>
<img
@@ -379,7 +379,11 @@ const SpeechToText = ({ dialogProps }) => {
width: 50,
height: 50,
borderRadius: '50%',
backgroundColor: 'white'
backgroundColor: 'white',
flexShrink: 0,
display: 'flex',
alignItems: 'center',
justifyContent: 'center'
}}
>
<img
@@ -53,7 +53,7 @@ import { baseURL } from '@/store/constant'
import { SET_CHATFLOW, closeSnackbar as closeSnackbarAction, enqueueSnackbar as enqueueSnackbarAction } from '@/store/actions'

// Utils
import { initNode } from '@/utils/genericHelper'
import { initNode, showHideInputParams } from '@/utils/genericHelper'
import useNotifier from '@/utils/useNotifier'
import { toolAgentFlow } from './toolAgentFlow'

@@ -127,6 +127,28 @@ const CustomAssistantConfigurePreview = () => {
const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args))
const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args))

const handleChatModelDataChange = ({ inputParam, newValue }) => {
setSelectedChatModel((prevData) => {
const updatedData = { ...prevData }
updatedData.inputs[inputParam.name] = newValue
updatedData.inputParams = showHideInputParams(updatedData)
return updatedData
})
}

const handleToolDataChange =
(toolIndex) =>
({ inputParam, newValue }) => {
setSelectedTools((prevTools) => {
const updatedTools = [...prevTools]
const updatedTool = { ...updatedTools[toolIndex] }
updatedTool.inputs[inputParam.name] = newValue
updatedTool.inputParams = showHideInputParams(updatedTool)
updatedTools[toolIndex] = updatedTool
return updatedTools
})
}

const displayWarning = () => {
enqueueSnackbar({
message: 'Please fill in all mandatory fields.',

@@ -1126,13 +1148,14 @@ const CustomAssistantConfigurePreview = () => {
borderRadius: 2
}}
>
{(selectedChatModel.inputParams ?? [])
.filter((inputParam) => !inputParam.hidden)
{showHideInputParams(selectedChatModel)
.filter((inputParam) => !inputParam.hidden && inputParam.display !== false)
.map((inputParam, index) => (
<DocStoreInputHandler
key={index}
inputParam={inputParam}
data={selectedChatModel}
onNodeDataChange={handleChatModelDataChange}
/>
))}
</Box>

@@ -1217,13 +1240,16 @@ const CustomAssistantConfigurePreview = () => {
mb: 1
}}
>
{(tool.inputParams ?? [])
.filter((inputParam) => !inputParam.hidden)
.map((inputParam, index) => (
{showHideInputParams(tool)
.filter(
(inputParam) => !inputParam.hidden && inputParam.display !== false
)
.map((inputParam, inputIndex) => (
<DocStoreInputHandler
key={index}
key={inputIndex}
inputParam={inputParam}
data={tool}
onNodeDataChange={handleToolDataChange(index)}
/>
))}
</Box>
@@ -64,7 +64,9 @@ const CustomAssistantLayout = () => {
const getImages = (details) => {
const images = []
if (details && details.chatModel && details.chatModel.name) {
images.push(`${baseURL}/api/v1/node-icon/${details.chatModel.name}`)
images.push({
imageSrc: `${baseURL}/api/v1/node-icon/${details.chatModel.name}`
})
}
return images
}
@@ -3,6 +3,7 @@ import { Handle, Position, useUpdateNodeInternals } from 'reactflow'
import { useEffect, useRef, useState, useContext } from 'react'
import { useSelector, useDispatch } from 'react-redux'
import { cloneDeep } from 'lodash'
import showdown from 'showdown'

// material-ui
import { useTheme, styled } from '@mui/material/styles'

@@ -98,6 +99,13 @@ const StyledPopper = styled(Popper)({
}
})

const markdownConverter = new showdown.Converter({
simplifiedAutoLink: true,
strikethrough: true,
tables: true,
tasklists: true
})

// ===========================|| NodeInputHandler ||=========================== //

const NodeInputHandler = ({

@@ -1389,7 +1397,12 @@ const NodeInputHandler = ({
onCancel={() => setPromptGeneratorDialogOpen(false)}
onConfirm={(generatedInstruction) => {
try {
data.inputs[inputParam.name] = generatedInstruction
if (inputParam?.acceptVariable && window.location.href.includes('v2/agentcanvas')) {
const htmlContent = markdownConverter.makeHtml(generatedInstruction)
data.inputs[inputParam.name] = htmlContent
} else {
data.inputs[inputParam.name] = generatedInstruction
}
setPromptGeneratorDialogOpen(false)
} catch (error) {
enqueueSnackbar({
@@ -140,7 +140,11 @@ const CredentialListDialog = ({ show, dialogProps, onCancel, onCredentialSelecte
width: 50,
height: 50,
borderRadius: '50%',
backgroundColor: 'white'
backgroundColor: 'white',
flexShrink: 0,
display: 'flex',
alignItems: 'center',
justifyContent: 'center'
}}
>
<img
@@ -153,7 +153,11 @@ const ComponentsListDialog = ({ show, dialogProps, onCancel, apiCall, onSelected
width: 50,
height: 50,
borderRadius: '50%',
backgroundColor: 'white'
backgroundColor: 'white',
flexShrink: 0,
display: 'flex',
alignItems: 'center',
justifyContent: 'center'
}}
>
<img
@@ -1,5 +1,5 @@
import PropTypes from 'prop-types'
import { useState } from 'react'
import { useState, useContext } from 'react'
import { useSelector } from 'react-redux'

// material-ui

@@ -20,14 +20,17 @@ import { CodeEditor } from '@/ui-component/editor/CodeEditor'
import ExpandTextDialog from '@/ui-component/dialog/ExpandTextDialog'
import ManageScrapedLinksDialog from '@/ui-component/dialog/ManageScrapedLinksDialog'
import CredentialInputHandler from '@/views/canvas/CredentialInputHandler'
import { flowContext } from '@/store/context/ReactFlowContext'

// const
import { FLOWISE_CREDENTIAL_ID } from '@/store/constant'

// ===========================|| DocStoreInputHandler ||=========================== //

const DocStoreInputHandler = ({ inputParam, data, disabled = false }) => {
const DocStoreInputHandler = ({ inputParam, data, disabled = false, onNodeDataChange }) => {
const customization = useSelector((state) => state.customization)
const flowContextValue = useContext(flowContext)
const nodeDataChangeHandler = onNodeDataChange || flowContextValue?.onNodeDataChange

const [showExpandDialog, setShowExpandDialog] = useState(false)
const [expandDialogProps, setExpandDialogProps] = useState({})

@@ -35,6 +38,14 @@ const DocStoreInputHandler = ({ inputParam, data, disabled = false }) => {
const [manageScrapedLinksDialogProps, setManageScrapedLinksDialogProps] = useState({})
const [reloadTimestamp, setReloadTimestamp] = useState(Date.now().toString())

const handleDataChange = ({ inputParam, newValue }) => {
data.inputs[inputParam.name] = newValue
const allowedShowHideInputTypes = ['boolean', 'asyncOptions', 'asyncMultiOptions', 'options', 'multiOptions']
if (allowedShowHideInputTypes.includes(inputParam.type) && nodeDataChangeHandler) {
nodeDataChangeHandler({ nodeId: data.id, inputParam, newValue })
}
}

const onExpandDialogClicked = (value, inputParam) => {
const dialogProps = {
value,

@@ -149,7 +160,7 @@ const DocStoreInputHandler = ({ inputParam, data, disabled = false }) => {
{inputParam.type === 'boolean' && (
<SwitchInput
disabled={disabled}
onChange={(newValue) => (data.inputs[inputParam.name] = newValue)}
onChange={(newValue) => handleDataChange({ inputParam, newValue })}
value={data.inputs[inputParam.name] ?? inputParam.default ?? false}
/>
)}

@@ -203,7 +214,7 @@ const DocStoreInputHandler = ({ inputParam, data, disabled = false }) => {
disabled={disabled}
name={inputParam.name}
options={inputParam.options}
onSelect={(newValue) => (data.inputs[inputParam.name] = newValue)}
onSelect={(newValue) => handleDataChange({ inputParam, newValue })}
value={data.inputs[inputParam.name] ?? inputParam.default ?? 'choose an option'}
/>
)}

@@ -213,7 +224,7 @@ const DocStoreInputHandler = ({ inputParam, data, disabled = false }) => {
disabled={disabled}
name={inputParam.name}
options={inputParam.options}
onSelect={(newValue) => (data.inputs[inputParam.name] = newValue)}
onSelect={(newValue) => handleDataChange({ inputParam, newValue })}
value={data.inputs[inputParam.name] ?? inputParam.default ?? 'choose an option'}
/>
)}

@@ -230,7 +241,7 @@ const DocStoreInputHandler = ({ inputParam, data, disabled = false }) => {
freeSolo={inputParam.freeSolo}
multiple={inputParam.type === 'asyncMultiOptions'}
value={data.inputs[inputParam.name] ?? inputParam.default ?? 'choose an option'}
onSelect={(newValue) => (data.inputs[inputParam.name] = newValue)}
onSelect={(newValue) => handleDataChange({ inputParam, newValue })}
onCreateNew={() => addAsyncOption(inputParam.name)}
/>
</div>

@@ -296,7 +307,8 @@ const DocStoreInputHandler = ({ inputParam, data, disabled = false }) => {
DocStoreInputHandler.propTypes = {
inputParam: PropTypes.object,
data: PropTypes.object,
disabled: PropTypes.bool
disabled: PropTypes.bool,
onNodeDataChange: PropTypes.func
}

export default DocStoreInputHandler
@@ -153,7 +153,11 @@ const DocumentLoaderListDialog = ({ show, dialogProps, onCancel, onDocLoaderSele
width: 50,
height: 50,
borderRadius: '50%',
backgroundColor: 'white'
backgroundColor: 'white',
flexShrink: 0,
display: 'flex',
alignItems: 'center',
justifyContent: 'center'
}}
>
<img
@@ -35,7 +35,7 @@ import { closeSnackbar as closeSnackbarAction, enqueueSnackbar as enqueueSnackba
import { useError } from '@/store/context/ErrorContext'

// Utils
import { initNode } from '@/utils/genericHelper'
import { initNode, showHideInputParams } from '@/utils/genericHelper'
import useNotifier from '@/utils/useNotifier'

const CardWrapper = styled(MainCard)(({ theme }) => ({

@@ -98,6 +98,24 @@ const LoaderConfigPreviewChunks = () => {
const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args))
const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args))

const handleDocumentLoaderDataChange = ({ inputParam, newValue }) => {
setSelectedDocumentLoader((prevData) => {
const updatedData = { ...prevData }
updatedData.inputs[inputParam.name] = newValue
updatedData.inputParams = showHideInputParams(updatedData)
return updatedData
})
}

const handleTextSplitterDataChange = ({ inputParam, newValue }) => {
setSelectedTextSplitter((prevData) => {
const updatedData = { ...prevData }
updatedData.inputs[inputParam.name] = newValue
updatedData.inputParams = showHideInputParams(updatedData)
return updatedData
})
}

const onSplitterChange = (name) => {
const textSplitter = (textSplitterNodes ?? []).find((splitter) => splitter.name === name)
if (textSplitter) {

@@ -452,13 +470,14 @@ const LoaderConfigPreviewChunks = () => {
</Box>
{selectedDocumentLoader &&
Object.keys(selectedDocumentLoader).length > 0 &&
(selectedDocumentLoader.inputParams ?? [])
.filter((inputParam) => !inputParam.hidden)
showHideInputParams(selectedDocumentLoader)
.filter((inputParam) => !inputParam.hidden && inputParam.display !== false)
.map((inputParam, index) => (
<DocStoreInputHandler
key={index}
inputParam={inputParam}
data={selectedDocumentLoader}
onNodeDataChange={handleDocumentLoaderDataChange}
/>
))}
{textSplitterNodes && textSplitterNodes.length > 0 && (

@@ -511,10 +530,15 @@ const LoaderConfigPreviewChunks = () => {
</>
)}
{Object.keys(selectedTextSplitter).length > 0 &&
(selectedTextSplitter.inputParams ?? [])
.filter((inputParam) => !inputParam.hidden)
showHideInputParams(selectedTextSplitter)
.filter((inputParam) => !inputParam.hidden && inputParam.display !== false)
.map((inputParam, index) => (
<DocStoreInputHandler key={index} data={selectedTextSplitter} inputParam={inputParam} />
<DocStoreInputHandler
key={index}
data={selectedTextSplitter}
inputParam={inputParam}
onNodeDataChange={handleTextSplitterDataChange}
/>
))}
</div>
</Grid>
@@ -40,7 +40,7 @@ import Storage from '@mui/icons-material/Storage'
import DynamicFeed from '@mui/icons-material/Filter1'

// utils
import { initNode } from '@/utils/genericHelper'
import { initNode, showHideInputParams } from '@/utils/genericHelper'
import useNotifier from '@/utils/useNotifier'

// const

@@ -89,6 +89,33 @@ const VectorStoreConfigure = () => {
const [showUpsertHistoryDetailsDialog, setShowUpsertHistoryDetailsDialog] = useState(false)
const [upsertDetailsDialogProps, setUpsertDetailsDialogProps] = useState({})

const handleEmbeddingsProviderDataChange = ({ inputParam, newValue }) => {
setSelectedEmbeddingsProvider((prevData) => {
const updatedData = { ...prevData }
updatedData.inputs[inputParam.name] = newValue
updatedData.inputParams = showHideInputParams(updatedData)
return updatedData
})
}

const handleVectorStoreProviderDataChange = ({ inputParam, newValue }) => {
setSelectedVectorStoreProvider((prevData) => {
const updatedData = { ...prevData }
updatedData.inputs[inputParam.name] = newValue
updatedData.inputParams = showHideInputParams(updatedData)
return updatedData
})
}

const handleRecordManagerProviderDataChange = ({ inputParam, newValue }) => {
setSelectedRecordManagerProvider((prevData) => {
const updatedData = { ...prevData }
updatedData.inputs[inputParam.name] = newValue
updatedData.inputParams = showHideInputParams(updatedData)
return updatedData
})
}

const onEmbeddingsSelected = (component) => {
const nodeData = cloneDeep(initNode(component, uuidv4()))
if (!showEmbeddingsListDialog && documentStore.embeddingConfig) {

@@ -599,14 +626,17 @@ const VectorStoreConfigure = () => {
</Box>
{selectedEmbeddingsProvider &&
Object.keys(selectedEmbeddingsProvider).length > 0 &&
(selectedEmbeddingsProvider.inputParams ?? [])
.filter((inputParam) => !inputParam.hidden)
showHideInputParams(selectedEmbeddingsProvider)
.filter(
(inputParam) => !inputParam.hidden && inputParam.display !== false
)
.map((inputParam, index) => (
<DocStoreInputHandler
key={index}
data={selectedEmbeddingsProvider}
inputParam={inputParam}
isAdditionalParams={inputParam.additionalParams}
onNodeDataChange={handleEmbeddingsProviderDataChange}
/>
))}
</div>

@@ -714,14 +744,17 @@ const VectorStoreConfigure = () => {
</Box>
{selectedVectorStoreProvider &&
Object.keys(selectedVectorStoreProvider).length > 0 &&
(selectedVectorStoreProvider.inputParams ?? [])
.filter((inputParam) => !inputParam.hidden)
showHideInputParams(selectedVectorStoreProvider)
.filter(
(inputParam) => !inputParam.hidden && inputParam.display !== false
)
.map((inputParam, index) => (
<DocStoreInputHandler
key={index}
data={selectedVectorStoreProvider}
inputParam={inputParam}
isAdditionalParams={inputParam.additionalParams}
onNodeDataChange={handleVectorStoreProviderDataChange}
/>
))}
</div>

@@ -837,17 +870,18 @@ const VectorStoreConfigure = () => {
</Box>
{selectedRecordManagerProvider &&
Object.keys(selectedRecordManagerProvider).length > 0 &&
(selectedRecordManagerProvider.inputParams ?? [])
.filter((inputParam) => !inputParam.hidden)
showHideInputParams(selectedRecordManagerProvider)
.filter(
(inputParam) => !inputParam.hidden && inputParam.display !== false
)
.map((inputParam, index) => (
<>
<DocStoreInputHandler
key={index}
data={selectedRecordManagerProvider}
inputParam={inputParam}
isAdditionalParams={inputParam.additionalParams}
/>
</>
<DocStoreInputHandler
key={index}
data={selectedRecordManagerProvider}
inputParam={inputParam}
isAdditionalParams={inputParam.additionalParams}
onNodeDataChange={handleRecordManagerProviderDataChange}
/>
))}
</div>
</Grid>
@@ -31,7 +31,7 @@ import useApi from '@/hooks/useApi'
import { useAuth } from '@/hooks/useAuth'
import useNotifier from '@/utils/useNotifier'
import { baseURL } from '@/store/constant'
import { initNode } from '@/utils/genericHelper'
import { initNode, showHideInputParams } from '@/utils/genericHelper'
import { closeSnackbar as closeSnackbarAction, enqueueSnackbar as enqueueSnackbarAction } from '@/store/actions'

const CardWrapper = styled(MainCard)(({ theme }) => ({

@@ -84,6 +84,15 @@ const VectorStoreQuery = () => {
const getVectorStoreNodeDetailsApi = useApi(nodesApi.getSpecificNode)
const [selectedVectorStoreProvider, setSelectedVectorStoreProvider] = useState({})

const handleVectorStoreProviderDataChange = ({ inputParam, newValue }) => {
setSelectedVectorStoreProvider((prevData) => {
const updatedData = { ...prevData }
updatedData.inputs[inputParam.name] = newValue
updatedData.inputParams = showHideInputParams(updatedData)
return updatedData
})
}

const chunkSelected = (chunkId, selectedChunkNumber) => {
const selectedChunk = documentChunks.find((chunk) => chunk.id === chunkId)
const dialogProps = {

@@ -354,14 +363,15 @@ const VectorStoreQuery = () => {
</Box>
{selectedVectorStoreProvider &&
Object.keys(selectedVectorStoreProvider).length > 0 &&
(selectedVectorStoreProvider.inputParams ?? [])
.filter((inputParam) => !inputParam.hidden)
showHideInputParams(selectedVectorStoreProvider)
.filter((inputParam) => !inputParam.hidden && inputParam.display !== false)
.map((inputParam, index) => (
<DocStoreInputHandler
key={index}
data={selectedVectorStoreProvider}
inputParam={inputParam}
isAdditionalParams={inputParam.additionalParams}
onNodeDataChange={handleVectorStoreProviderDataChange}
/>
))}
</div>