diff --git a/packages/components/credentials/GoogleGenerativeAI.credential.ts b/packages/components/credentials/GoogleGenerativeAI.credential.ts new file mode 100644 index 000000000..e5ad45bfa --- /dev/null +++ b/packages/components/credentials/GoogleGenerativeAI.credential.ts @@ -0,0 +1,26 @@ +import { INodeParams, INodeCredential } from '../src/Interface' + +class GoogleGenerativeAICredential implements INodeCredential { + label: string + name: string + version: number + description: string + inputs: INodeParams[] + + constructor() { + this.label = 'Google Generative AI' + this.name = 'googleGenerativeAI' + this.version = 1.0 + this.description = + 'You can get your API key from official page here.' + this.inputs = [ + { + label: 'Google AI API Key', + name: 'googleGenerativeAPIKey', + type: 'password' + } + ] + } +} + +module.exports = { credClass: GoogleGenerativeAICredential } diff --git a/packages/components/credentials/MistralApi.credential.ts b/packages/components/credentials/MistralApi.credential.ts new file mode 100644 index 000000000..a254f6659 --- /dev/null +++ b/packages/components/credentials/MistralApi.credential.ts @@ -0,0 +1,25 @@ +import { INodeParams, INodeCredential } from '../src/Interface' + +class MistralAICredential implements INodeCredential { + label: string + name: string + version: number + description: string + inputs: INodeParams[] + + constructor() { + this.label = 'MistralAI API' + this.name = 'mistralAIApi' + this.version = 1.0 + this.description = 'You can get your API key from official console here.' + this.inputs = [ + { + label: 'MistralAI API Key', + name: 'mistralAIAPIKey', + type: 'password' + } + ] + } +} + +module.exports = { credClass: MistralAICredential } diff --git a/packages/components/credentials/RedisCacheApi.credential.ts b/packages/components/credentials/RedisCacheApi.credential.ts index 4d1a2498f..2b4ad6187 100644 --- a/packages/components/credentials/RedisCacheApi.credential.ts +++ b/packages/components/credentials/RedisCacheApi.credential.ts @@ -35,6 +35,11 @@ class RedisCacheApi implements INodeCredential { name: 'redisCachePwd', type: 'password', placeholder: '' + }, + { + label: 'Use SSL', + name: 'redisCacheSslEnabled', + type: 'boolean' } ] } diff --git a/packages/components/nodes/cache/RedisCache/RedisCache.ts b/packages/components/nodes/cache/RedisCache/RedisCache.ts index 8128b6e32..4e61c239e 100644 --- a/packages/components/nodes/cache/RedisCache/RedisCache.ts +++ b/packages/components/nodes/cache/RedisCache/RedisCache.ts @@ -56,12 +56,16 @@ class RedisCache implements INode { const password = getCredentialParam('redisCachePwd', credentialData, nodeData) const portStr = getCredentialParam('redisCachePort', credentialData, nodeData) const host = getCredentialParam('redisCacheHost', credentialData, nodeData) + const sslEnabled = getCredentialParam('redisCacheSslEnabled', credentialData, nodeData) + + const tlsOptions = sslEnabled === true ? { tls: { rejectUnauthorized: false } } : {} client = new Redis({ port: portStr ? 
parseInt(portStr) : 6379, host, username, - password + password, + ...tlsOptions }) } else { client = new Redis(redisUrl) diff --git a/packages/components/nodes/cache/RedisCache/RedisEmbeddingsCache.ts b/packages/components/nodes/cache/RedisCache/RedisEmbeddingsCache.ts index f15869d79..fe1b4df8a 100644 --- a/packages/components/nodes/cache/RedisCache/RedisEmbeddingsCache.ts +++ b/packages/components/nodes/cache/RedisCache/RedisEmbeddingsCache.ts @@ -71,12 +71,16 @@ class RedisEmbeddingsCache implements INode { const password = getCredentialParam('redisCachePwd', credentialData, nodeData) const portStr = getCredentialParam('redisCachePort', credentialData, nodeData) const host = getCredentialParam('redisCacheHost', credentialData, nodeData) + const sslEnabled = getCredentialParam('redisCacheSslEnabled', credentialData, nodeData) + + const tlsOptions = sslEnabled === true ? { tls: { rejectUnauthorized: false } } : {} client = new Redis({ port: portStr ? parseInt(portStr) : 6379, host, username, - password + password, + ...tlsOptions }) } else { client = new Redis(redisUrl) diff --git a/packages/components/nodes/chatmodels/AWSBedrock/AWSChatBedrock.ts b/packages/components/nodes/chatmodels/AWSBedrock/AWSChatBedrock.ts index 956fcdb33..29faf5241 100644 --- a/packages/components/nodes/chatmodels/AWSBedrock/AWSChatBedrock.ts +++ b/packages/components/nodes/chatmodels/AWSBedrock/AWSChatBedrock.ts @@ -1,9 +1,9 @@ import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface' import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils' -import { ChatBedrock } from 'langchain/chat_models/bedrock' +import { BedrockChat } from 'langchain/chat_models/bedrock' import { BaseBedrockInput } from 'langchain/dist/util/bedrock' import { BaseCache } from 'langchain/schema' -import { BaseLLMParams } from 'langchain/llms/base' +import { BaseChatModelParams } from 'langchain/chat_models/base' /** * I had to run the following to build the component @@ -25,14 +25,14 @@ class AWSChatBedrock_ChatModels implements INode { inputs: INodeParams[] constructor() { - this.label = 'AWS Bedrock' + this.label = 'AWS ChatBedrock' this.name = 'awsChatBedrock' this.version = 3.0 this.type = 'AWSChatBedrock' this.icon = 'awsBedrock.png' this.category = 'Chat Models' this.description = 'Wrapper around AWS Bedrock large language models that use the Chat endpoint' - this.baseClasses = [this.type, ...getBaseClasses(ChatBedrock)] + this.baseClasses = [this.type, ...getBaseClasses(BedrockChat)] this.credential = { label: 'AWS Credential', name: 'credential', @@ -102,6 +102,13 @@ class AWSChatBedrock_ChatModels implements INode { ], default: 'anthropic.claude-v2' }, + { + label: 'Custom Model Name', + name: 'customModel', + description: 'If provided, will override model selected from Model Name option', + type: 'string', + optional: true + }, { label: 'Temperature', name: 'temperature', @@ -109,6 +116,7 @@ class AWSChatBedrock_ChatModels implements INode { step: 0.1, description: 'Temperature parameter may not apply to certain model. Please check available model parameters', optional: true, + additionalParams: true, default: 0.7 }, { @@ -118,6 +126,7 @@ class AWSChatBedrock_ChatModels implements INode { step: 10, description: 'Max Tokens parameter may not apply to certain model. 
Please check available model parameters', optional: true, + additionalParams: true, default: 200 } ] @@ -126,14 +135,15 @@ class AWSChatBedrock_ChatModels implements INode { async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { const iRegion = nodeData.inputs?.region as string const iModel = nodeData.inputs?.model as string + const customModel = nodeData.inputs?.customModel as string const iTemperature = nodeData.inputs?.temperature as string const iMax_tokens_to_sample = nodeData.inputs?.max_tokens_to_sample as string const cache = nodeData.inputs?.cache as BaseCache const streaming = nodeData.inputs?.streaming as boolean - const obj: BaseBedrockInput & BaseLLMParams = { + const obj: BaseBedrockInput & BaseChatModelParams = { region: iRegion, - model: iModel, + model: customModel ?? iModel, maxTokens: parseInt(iMax_tokens_to_sample, 10), temperature: parseFloat(iTemperature), streaming: streaming ?? true @@ -160,7 +170,7 @@ class AWSChatBedrock_ChatModels implements INode { } if (cache) obj.cache = cache - const amazonBedrock = new ChatBedrock(obj) + const amazonBedrock = new BedrockChat(obj) return amazonBedrock } } diff --git a/packages/components/nodes/chatmodels/ChatGoogleGenerativeAI/ChatGoogleGenerativeAI.ts b/packages/components/nodes/chatmodels/ChatGoogleGenerativeAI/ChatGoogleGenerativeAI.ts new file mode 100644 index 000000000..7044645f6 --- /dev/null +++ b/packages/components/nodes/chatmodels/ChatGoogleGenerativeAI/ChatGoogleGenerativeAI.ts @@ -0,0 +1,107 @@ +import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface' +import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils' +import { BaseCache } from 'langchain/schema' +import { ChatGoogleGenerativeAI } from '@langchain/google-genai' + +class GoogleGenerativeAI_ChatModels implements INode { + label: string + name: string + version: number + type: string + icon: string + category: string + description: string + baseClasses: string[] + credential: INodeParams + inputs: INodeParams[] + + constructor() { + this.label = 'ChatGoogleGenerativeAI' + this.name = 'chatGoogleGenerativeAI' + this.version = 1.0 + this.type = 'ChatGoogleGenerativeAI' + this.icon = 'gemini.png' + this.category = 'Chat Models' + this.description = 'Wrapper around Google Gemini large language models that use the Chat endpoint' + this.baseClasses = [this.type, ...getBaseClasses(ChatGoogleGenerativeAI)] + this.credential = { + label: 'Connect Credential', + name: 'credential', + type: 'credential', + credentialNames: ['googleGenerativeAI'], + optional: false, + description: 'Google Generative AI credential.' + } + this.inputs = [ + { + label: 'Cache', + name: 'cache', + type: 'BaseCache', + optional: true + }, + { + label: 'Model Name', + name: 'modelName', + type: 'options', + options: [ + { + label: 'gemini-pro', + name: 'gemini-pro' + } + ], + default: 'gemini-pro' + }, + { + label: 'Temperature', + name: 'temperature', + type: 'number', + step: 0.1, + default: 0.9, + optional: true + }, + { + label: 'Max Output Tokens', + name: 'maxOutputTokens', + type: 'number', + step: 1, + optional: true, + additionalParams: true + }, + { + label: 'Top Probability', + name: 'topP', + type: 'number', + step: 0.1, + optional: true, + additionalParams: true + } + ] + } + + async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { + const credentialData = await getCredentialData(nodeData.credential ?? 
'', options) + const apiKey = getCredentialParam('googleGenerativeAPIKey', credentialData, nodeData) + + const temperature = nodeData.inputs?.temperature as string + const modelName = nodeData.inputs?.modelName as string + const maxOutputTokens = nodeData.inputs?.maxOutputTokens as string + const topP = nodeData.inputs?.topP as string + const cache = nodeData.inputs?.cache as BaseCache + + const obj = { + apiKey: apiKey, + modelName: modelName, + maxOutputTokens: 2048 + } + + if (maxOutputTokens) obj.maxOutputTokens = parseInt(maxOutputTokens, 10) + + const model = new ChatGoogleGenerativeAI(obj) + if (topP) model.topP = parseFloat(topP) + if (cache) model.cache = cache + if (temperature) model.temperature = parseFloat(temperature) + return model + } +} + +module.exports = { nodeClass: GoogleGenerativeAI_ChatModels } diff --git a/packages/components/nodes/chatmodels/ChatGoogleGenerativeAI/gemini.png b/packages/components/nodes/chatmodels/ChatGoogleGenerativeAI/gemini.png new file mode 100644 index 000000000..6c0d60f44 Binary files /dev/null and b/packages/components/nodes/chatmodels/ChatGoogleGenerativeAI/gemini.png differ diff --git a/packages/components/nodes/chatmodels/ChatMistral/ChatMistral.ts b/packages/components/nodes/chatmodels/ChatMistral/ChatMistral.ts new file mode 100644 index 000000000..2548dd991 --- /dev/null +++ b/packages/components/nodes/chatmodels/ChatMistral/ChatMistral.ts @@ -0,0 +1,150 @@ +import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface' +import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils' +import { BaseCache } from 'langchain/schema' +import { ChatMistralAI, ChatMistralAIInput } from '@langchain/mistralai' + +class ChatMistral_ChatModels implements INode { + label: string + name: string + version: number + type: string + icon: string + category: string + description: string + baseClasses: string[] + credential: INodeParams + inputs: INodeParams[] + + constructor() { + this.label = 'ChatMistralAI' + this.name = 'chatMistralAI' + this.version = 1.0 + this.type = 'ChatMistralAI' + this.icon = 'mistralai.png' + this.category = 'Chat Models' + this.description = 'Wrapper around Mistral large language models that use the Chat endpoint' + this.baseClasses = [this.type, ...getBaseClasses(ChatMistralAI)] + this.credential = { + label: 'Connect Credential', + name: 'credential', + type: 'credential', + credentialNames: ['mistralAIApi'] + } + this.inputs = [ + { + label: 'Cache', + name: 'cache', + type: 'BaseCache', + optional: true + }, + { + label: 'Model Name', + name: 'modelName', + type: 'options', + options: [ + { + label: 'mistral-tiny', + name: 'mistral-tiny' + }, + { + label: 'mistral-small', + name: 'mistral-small' + }, + { + label: 'mistral-medium', + name: 'mistral-medium' + } + ], + default: 'mistral-tiny' + }, + { + label: 'Temperature', + name: 'temperature', + type: 'number', + description: + 'What sampling temperature to use, between 0.0 and 1.0. 
Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic.', + step: 0.1, + default: 0.9, + optional: true + }, + { + label: 'Max Output Tokens', + name: 'maxOutputTokens', + type: 'number', + description: 'The maximum number of tokens to generate in the completion.', + step: 1, + optional: true, + additionalParams: true + }, + { + label: 'Top Probability', + name: 'topP', + type: 'number', + description: + 'Nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered.', + step: 0.1, + optional: true, + additionalParams: true + }, + { + label: 'Random Seed', + name: 'randomSeed', + type: 'number', + description: 'The seed to use for random sampling. If set, different calls will generate deterministic results.', + step: 1, + optional: true, + additionalParams: true + }, + { + label: 'Safe Mode', + name: 'safeMode', + type: 'boolean', + description: 'Whether to inject a safety prompt before all conversations.', + optional: true, + additionalParams: true + }, + { + label: 'Override Endpoint', + name: 'overrideEndpoint', + type: 'string', + optional: true, + additionalParams: true + } + ] + } + + async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { + const credentialData = await getCredentialData(nodeData.credential ?? '', options) + const apiKey = getCredentialParam('mistralAIAPIKey', credentialData, nodeData) + + const temperature = nodeData.inputs?.temperature as string + const modelName = nodeData.inputs?.modelName as string + const maxOutputTokens = nodeData.inputs?.maxOutputTokens as string + const topP = nodeData.inputs?.topP as string + const safeMode = nodeData.inputs?.safeMode as boolean + const randomSeed = nodeData.inputs?.randomSeed as string + const overrideEndpoint = nodeData.inputs?.overrideEndpoint as string + // Waiting fix from langchain + mistral to enable streaming - https://github.com/mistralai/client-js/issues/18 + + const cache = nodeData.inputs?.cache as BaseCache + + const obj: ChatMistralAIInput = { + apiKey: apiKey, + modelName: modelName + } + + if (maxOutputTokens) obj.maxTokens = parseInt(maxOutputTokens, 10) + if (topP) obj.topP = parseFloat(topP) + if (cache) obj.cache = cache + if (temperature) obj.temperature = parseFloat(temperature) + if (randomSeed) obj.randomSeed = parseFloat(randomSeed) + if (safeMode) obj.safeMode = safeMode + if (overrideEndpoint) obj.endpoint = overrideEndpoint + + const model = new ChatMistralAI(obj) + + return model + } +} + +module.exports = { nodeClass: ChatMistral_ChatModels } diff --git a/packages/components/nodes/chatmodels/ChatMistral/mistralai.png b/packages/components/nodes/chatmodels/ChatMistral/mistralai.png new file mode 100644 index 000000000..1019f495d Binary files /dev/null and b/packages/components/nodes/chatmodels/ChatMistral/mistralai.png differ diff --git a/packages/components/nodes/embeddings/AWSBedrockEmbedding/AWSBedrockEmbedding.ts b/packages/components/nodes/embeddings/AWSBedrockEmbedding/AWSBedrockEmbedding.ts index 8249d5121..5f7ce17c6 100644 --- a/packages/components/nodes/embeddings/AWSBedrockEmbedding/AWSBedrockEmbedding.ts +++ b/packages/components/nodes/embeddings/AWSBedrockEmbedding/AWSBedrockEmbedding.ts @@ -18,7 +18,7 @@ class AWSBedrockEmbedding_Embeddings implements INode { constructor() { this.label = 'AWS Bedrock Embeddings' this.name = 'AWSBedrockEmbeddings' - this.version = 2.0 +
this.version = 3.0 this.type = 'AWSBedrockEmbeddings' this.icon = 'awsBedrock.png' this.category = 'Embeddings' @@ -86,6 +86,13 @@ class AWSBedrockEmbedding_Embeddings implements INode { { label: 'cohere.embed-multilingual-v3', name: 'cohere.embed-multilingual-v3' } ], default: 'amazon.titan-embed-text-v1' + }, + { + label: 'Custom Model Name', + name: 'customModel', + description: 'If provided, will override model selected from Model Name option', + type: 'string', + optional: true } ] } @@ -93,9 +100,10 @@ class AWSBedrockEmbedding_Embeddings implements INode { async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { const iRegion = nodeData.inputs?.region as string const iModel = nodeData.inputs?.model as string + const customModel = nodeData.inputs?.customModel as string const obj: BedrockEmbeddingsParams = { - model: iModel, + model: customModel ?? iModel, region: iRegion } diff --git a/packages/components/nodes/embeddings/GoogleGenerativeAIEmbedding/GoogleGenerativeAIEmbedding.ts b/packages/components/nodes/embeddings/GoogleGenerativeAIEmbedding/GoogleGenerativeAIEmbedding.ts new file mode 100644 index 000000000..fa5cff450 --- /dev/null +++ b/packages/components/nodes/embeddings/GoogleGenerativeAIEmbedding/GoogleGenerativeAIEmbedding.ts @@ -0,0 +1,104 @@ +import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface' +import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils' +import { GoogleGenerativeAIEmbeddings, GoogleGenerativeAIEmbeddingsParams } from '@langchain/google-genai' +import { TaskType } from '@google/generative-ai' + +class GoogleGenerativeAIEmbedding_Embeddings implements INode { + label: string + name: string + version: number + type: string + icon: string + category: string + description: string + baseClasses: string[] + inputs: INodeParams[] + credential: INodeParams + + constructor() { + this.label = 'GoogleGenerativeAI Embeddings' + this.name = 'googleGenerativeAiEmbeddings' + this.version = 1.0 + this.type = 'GoogleGenerativeAiEmbeddings' + this.icon = 'gemini.png' + this.category = 'Embeddings' + this.description = 'Google Generative API to generate embeddings for a given text' + this.baseClasses = [this.type, ...getBaseClasses(GoogleGenerativeAIEmbeddings)] + this.credential = { + label: 'Connect Credential', + name: 'credential', + type: 'credential', + credentialNames: ['googleGenerativeAI'], + optional: false, + description: 'Google Generative AI credential.' + } + this.inputs = [ + { + label: 'Model Name', + name: 'modelName', + type: 'options', + options: [ + { + label: 'embedding-001', + name: 'embedding-001' + } + ], + default: 'embedding-001' + }, + { + label: 'Task Type', + name: 'tasktype', + type: 'options', + description: 'Type of task for which the embedding will be used', + options: [ + { label: 'TASK_TYPE_UNSPECIFIED', name: 'TASK_TYPE_UNSPECIFIED' }, + { label: 'RETRIEVAL_QUERY', name: 'RETRIEVAL_QUERY' }, + { label: 'RETRIEVAL_DOCUMENT', name: 'RETRIEVAL_DOCUMENT' }, + { label: 'SEMANTIC_SIMILARITY', name: 'SEMANTIC_SIMILARITY' }, + { label: 'CLASSIFICATION', name: 'CLASSIFICATION' }, + { label: 'CLUSTERING', name: 'CLUSTERING' } + ], + default: 'TASK_TYPE_UNSPECIFIED' + } + ] + } + + // eslint-disable-next-line unused-imports/no-unused-vars + async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { + const modelName = nodeData.inputs?.modelName as string + const credentialData = await getCredentialData(nodeData.credential ?? 
'', options) + const apiKey = getCredentialParam('googleGenerativeAPIKey', credentialData, nodeData) + + let taskType: TaskType + switch (nodeData.inputs?.tasktype as string) { + case 'RETRIEVAL_QUERY': + taskType = TaskType.RETRIEVAL_QUERY + break + case 'RETRIEVAL_DOCUMENT': + taskType = TaskType.RETRIEVAL_DOCUMENT + break + case 'SEMANTIC_SIMILARITY': + taskType = TaskType.SEMANTIC_SIMILARITY + break + case 'CLASSIFICATION': + taskType = TaskType.CLASSIFICATION + break + case 'CLUSTERING': + taskType = TaskType.CLUSTERING + break + default: + taskType = TaskType.TASK_TYPE_UNSPECIFIED + break + } + const obj: GoogleGenerativeAIEmbeddingsParams = { + apiKey: apiKey, + modelName: modelName, + taskType: taskType + } + + const model = new GoogleGenerativeAIEmbeddings(obj) + return model + } +} + +module.exports = { nodeClass: GoogleGenerativeAIEmbedding_Embeddings } diff --git a/packages/components/nodes/embeddings/GoogleGenerativeAIEmbedding/gemini.png b/packages/components/nodes/embeddings/GoogleGenerativeAIEmbedding/gemini.png new file mode 100644 index 000000000..6c0d60f44 Binary files /dev/null and b/packages/components/nodes/embeddings/GoogleGenerativeAIEmbedding/gemini.png differ diff --git a/packages/components/nodes/embeddings/MistralEmbedding/MistralEmbedding.ts b/packages/components/nodes/embeddings/MistralEmbedding/MistralEmbedding.ts new file mode 100644 index 000000000..d0a0198c3 --- /dev/null +++ b/packages/components/nodes/embeddings/MistralEmbedding/MistralEmbedding.ts @@ -0,0 +1,95 @@ +import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface' +import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils' +import { MistralAIEmbeddings, MistralAIEmbeddingsParams } from '@langchain/mistralai' + +class MistralEmbedding_Embeddings implements INode { + label: string + name: string + version: number + type: string + icon: string + category: string + description: string + baseClasses: string[] + inputs: INodeParams[] + credential: INodeParams + + constructor() { + this.label = 'MistralAI Embeddings' + this.name = 'mistralAIEmbeddings' + this.version = 1.0 + this.type = 'MistralAIEmbeddings' + this.icon = 'mistralai.png' + this.category = 'Embeddings' + this.description = 'MistralAI API to generate embeddings for a given text' + this.baseClasses = [this.type, ...getBaseClasses(MistralAIEmbeddings)] + this.credential = { + label: 'Connect Credential', + name: 'credential', + type: 'credential', + credentialNames: ['mistralAIApi'] + } + this.inputs = [ + { + label: 'Model Name', + name: 'modelName', + type: 'options', + options: [ + { + label: 'mistral-embed', + name: 'mistral-embed' + } + ], + default: 'mistral-embed' + }, + { + label: 'Batch Size', + name: 'batchSize', + type: 'number', + step: 1, + default: 512, + optional: true, + additionalParams: true + }, + { + label: 'Strip New Lines', + name: 'stripNewLines', + type: 'boolean', + default: true, + optional: true, + additionalParams: true + }, + { + label: 'Override Endpoint', + name: 'overrideEndpoint', + type: 'string', + optional: true, + additionalParams: true + } + ] + } + + async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { + const modelName = nodeData.inputs?.modelName as string + const batchSize = nodeData.inputs?.batchSize as string + const stripNewLines = nodeData.inputs?.stripNewLines as boolean + const overrideEndpoint = nodeData.inputs?.overrideEndpoint as string + + const credentialData = await
getCredentialData(nodeData.credential ?? '', options) + const apiKey = getCredentialParam('mistralAIAPIKey', credentialData, nodeData) + + const obj: MistralAIEmbeddingsParams = { + apiKey: apiKey, + modelName: modelName + } + + if (batchSize) obj.batchSize = parseInt(batchSize, 10) + if (stripNewLines) obj.stripNewLines = stripNewLines + if (overrideEndpoint) obj.endpoint = overrideEndpoint + + const model = new MistralAIEmbeddings(obj) + return model + } +} + +module.exports = { nodeClass: MistralEmbedding_Embeddings } diff --git a/packages/components/nodes/embeddings/MistralEmbedding/mistralai.png b/packages/components/nodes/embeddings/MistralEmbedding/mistralai.png new file mode 100644 index 000000000..1019f495d Binary files /dev/null and b/packages/components/nodes/embeddings/MistralEmbedding/mistralai.png differ diff --git a/packages/components/nodes/llms/AWSBedrock/AWSBedrock.ts b/packages/components/nodes/llms/AWSBedrock/AWSBedrock.ts index 177a32ef9..459c42964 100644 --- a/packages/components/nodes/llms/AWSBedrock/AWSBedrock.ts +++ b/packages/components/nodes/llms/AWSBedrock/AWSBedrock.ts @@ -27,7 +27,7 @@ class AWSBedrock_LLMs implements INode { constructor() { this.label = 'AWS Bedrock' this.name = 'awsBedrock' - this.version = 2.0 + this.version = 3.0 this.type = 'AWSBedrock' this.icon = 'awsBedrock.png' this.category = 'LLMs' @@ -105,6 +105,13 @@ class AWSBedrock_LLMs implements INode { { label: 'ai21.j2-ultra', name: 'ai21.j2-ultra' } ] }, + { + label: 'Custom Model Name', + name: 'customModel', + description: 'If provided, will override model selected from Model Name option', + type: 'string', + optional: true + }, { label: 'Temperature', name: 'temperature', @@ -112,6 +119,7 @@ class AWSBedrock_LLMs implements INode { step: 0.1, description: 'Temperature parameter may not apply to certain model. Please check available model parameters', optional: true, + additionalParams: true, default: 0.7 }, { @@ -121,6 +129,7 @@ class AWSBedrock_LLMs implements INode { step: 10, description: 'Max Tokens parameter may not apply to certain model. Please check available model parameters', optional: true, + additionalParams: true, default: 200 } ] @@ -129,11 +138,12 @@ class AWSBedrock_LLMs implements INode { async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { const iRegion = nodeData.inputs?.region as string const iModel = nodeData.inputs?.model as string + const customModel = nodeData.inputs?.customModel as string const iTemperature = nodeData.inputs?.temperature as string const iMax_tokens_to_sample = nodeData.inputs?.max_tokens_to_sample as string const cache = nodeData.inputs?.cache as BaseCache const obj: Partial & BaseLLMParams = { - model: iModel, + model: customModel ?? 
iModel, region: iRegion, temperature: parseFloat(iTemperature), maxTokens: parseInt(iMax_tokens_to_sample, 10) diff --git a/packages/components/nodes/memory/RedisBackedChatMemory/RedisBackedChatMemory.ts b/packages/components/nodes/memory/RedisBackedChatMemory/RedisBackedChatMemory.ts index 7fe447ad5..d6ec9a114 100644 --- a/packages/components/nodes/memory/RedisBackedChatMemory/RedisBackedChatMemory.ts +++ b/packages/components/nodes/memory/RedisBackedChatMemory/RedisBackedChatMemory.ts @@ -57,6 +57,14 @@ class RedisBackedChatMemory_Memory implements INode { type: 'string', default: 'chat_history', additionalParams: true + }, + { + label: 'Window Size', + name: 'windowSize', + type: 'number', + description: 'Window of size k to surface the last k back-and-forth to use as memory.', + additionalParams: true, + optional: true } ] } @@ -89,6 +97,7 @@ const initalizeRedis = async (nodeData: INodeData, options: ICommonObject): Prom const sessionId = nodeData.inputs?.sessionId as string const sessionTTL = nodeData.inputs?.sessionTTL as number const memoryKey = nodeData.inputs?.memoryKey as string + const windowSize = nodeData.inputs?.windowSize as number const chatId = options?.chatId as string let isSessionIdUsingChatMessageId = false @@ -103,12 +112,16 @@ const initalizeRedis = async (nodeData: INodeData, options: ICommonObject): Prom const password = getCredentialParam('redisCachePwd', credentialData, nodeData) const portStr = getCredentialParam('redisCachePort', credentialData, nodeData) const host = getCredentialParam('redisCacheHost', credentialData, nodeData) + const sslEnabled = getCredentialParam('redisCacheSslEnabled', credentialData, nodeData) + + const tlsOptions = sslEnabled === true ? { tls: { rejectUnauthorized: false } } : {} client = new Redis({ port: portStr ? parseInt(portStr) : 6379, host, username, - password + password, + ...tlsOptions }) } else { client = new Redis(redisUrl) @@ -129,7 +142,7 @@ const initalizeRedis = async (nodeData: INodeData, options: ICommonObject): Prom const redisChatMessageHistory = new RedisChatMessageHistory(obj) redisChatMessageHistory.getMessages = async (): Promise => { - const rawStoredMessages = await client.lrange((redisChatMessageHistory as any).sessionId, 0, -1) + const rawStoredMessages = await client.lrange((redisChatMessageHistory as any).sessionId, windowSize ? 
-windowSize : 0, -1) const orderedMessages = rawStoredMessages.reverse().map((message) => JSON.parse(message)) return orderedMessages.map(mapStoredMessageToChatMessage) } diff --git a/packages/components/nodes/tools/CustomTool/core.ts b/packages/components/nodes/tools/CustomTool/core.ts index 12dd72f19..2aa06b547 100644 --- a/packages/components/nodes/tools/CustomTool/core.ts +++ b/packages/components/nodes/tools/CustomTool/core.ts @@ -2,37 +2,7 @@ import { z } from 'zod' import { CallbackManagerForToolRun } from 'langchain/callbacks' import { StructuredTool, ToolParams } from 'langchain/tools' import { NodeVM } from 'vm2' - -/* - * List of dependencies allowed to be import in vm2 - */ -const availableDependencies = [ - '@dqbd/tiktoken', - '@getzep/zep-js', - '@huggingface/inference', - '@pinecone-database/pinecone', - '@supabase/supabase-js', - 'axios', - 'cheerio', - 'chromadb', - 'cohere-ai', - 'd3-dsv', - 'form-data', - 'graphql', - 'html-to-text', - 'langchain', - 'linkifyjs', - 'mammoth', - 'moment', - 'node-fetch', - 'pdf-parse', - 'pdfjs-dist', - 'playwright', - 'puppeteer', - 'srt-parser-2', - 'typeorm', - 'weaviate-ts-client' -] +import { availableDependencies } from '../../../src/utils' export interface BaseDynamicToolInput extends ToolParams { name: string diff --git a/packages/components/nodes/utilities/CustomFunction/CustomFunction.ts b/packages/components/nodes/utilities/CustomFunction/CustomFunction.ts new file mode 100644 index 000000000..b358b24b3 --- /dev/null +++ b/packages/components/nodes/utilities/CustomFunction/CustomFunction.ts @@ -0,0 +1,124 @@ +import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface' +import { NodeVM } from 'vm2' +import { availableDependencies, handleEscapeCharacters } from '../../../src/utils' + +class CustomFunction_Utilities implements INode { + label: string + name: string + version: number + description: string + type: string + icon: string + category: string + baseClasses: string[] + inputs: INodeParams[] + outputs: INodeOutputsValue[] + + constructor() { + this.label = 'Custom JS Function' + this.name = 'customFunction' + this.version = 1.0 + this.type = 'CustomFunction' + this.icon = 'customfunction.svg' + this.category = 'Utilities' + this.description = `Execute custom javascript function` + this.baseClasses = [this.type, 'Utilities'] + this.inputs = [ + { + label: 'Input Variables', + name: 'functionInputVariables', + description: 'Input variables can be used in the function with prefix $. For example: $var', + type: 'json', + optional: true, + acceptVariable: true, + list: true + }, + { + label: 'Function Name', + name: 'functionName', + type: 'string', + optional: true, + placeholder: 'My Function' + }, + { + label: 'Javascript Function', + name: 'javascriptFunction', + type: 'code' + } + ] + this.outputs = [ + { + label: 'Output', + name: 'output', + baseClasses: ['string', 'number', 'boolean', 'json', 'array'] + } + ] + } + + async init(nodeData: INodeData, input: string): Promise { + const javascriptFunction = nodeData.inputs?.javascriptFunction as string + const functionInputVariablesRaw = nodeData.inputs?.functionInputVariables + + let inputVars: ICommonObject = {} + if (functionInputVariablesRaw) { + try { + inputVars = + typeof functionInputVariablesRaw === 'object' ? 
functionInputVariablesRaw : JSON.parse(functionInputVariablesRaw) + } catch (exception) { + throw new Error("Invalid JSON in the PromptTemplate's promptValues: " + exception) + } + } + + let sandbox: any = { $input: input } + + if (Object.keys(inputVars).length) { + for (const item in inputVars) { + sandbox[`$${item}`] = inputVars[item] + } + } + + const defaultAllowBuiltInDep = [ + 'assert', + 'buffer', + 'crypto', + 'events', + 'http', + 'https', + 'net', + 'path', + 'querystring', + 'timers', + 'tls', + 'url', + 'zlib' + ] + + const builtinDeps = process.env.TOOL_FUNCTION_BUILTIN_DEP + ? defaultAllowBuiltInDep.concat(process.env.TOOL_FUNCTION_BUILTIN_DEP.split(',')) + : defaultAllowBuiltInDep + const externalDeps = process.env.TOOL_FUNCTION_EXTERNAL_DEP ? process.env.TOOL_FUNCTION_EXTERNAL_DEP.split(',') : [] + const deps = availableDependencies.concat(externalDeps) + + const nodeVMOptions = { + console: 'inherit', + sandbox, + require: { + external: { modules: deps }, + builtin: builtinDeps + } + } as any + + const vm = new NodeVM(nodeVMOptions) + try { + const response = await vm.run(`module.exports = async function() {${javascriptFunction}}()`, __dirname) + if (typeof response === 'string') { + return handleEscapeCharacters(response, false) + } + return response + } catch (e) { + throw new Error(e) + } + } +} + +module.exports = { nodeClass: CustomFunction_Utilities } diff --git a/packages/components/nodes/utilities/CustomFunction/customfunction.svg b/packages/components/nodes/utilities/CustomFunction/customfunction.svg new file mode 100644 index 000000000..bf60fcae7 --- /dev/null +++ b/packages/components/nodes/utilities/CustomFunction/customfunction.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/packages/components/nodes/utilities/GetVariable/GetVariable.ts b/packages/components/nodes/utilities/GetVariable/GetVariable.ts new file mode 100644 index 000000000..dde5a2d96 --- /dev/null +++ b/packages/components/nodes/utilities/GetVariable/GetVariable.ts @@ -0,0 +1,52 @@ +import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface' + +class GetVariable_Utilities implements INode { + label: string + name: string + version: number + description: string + type: string + icon: string + category: string + baseClasses: string[] + inputs: INodeParams[] + outputs: INodeOutputsValue[] + + constructor() { + this.label = 'Get Variable' + this.name = 'getVariable' + this.version = 1.0 + this.type = 'GetVariable' + this.icon = 'getvar.svg' + this.category = 'Utilities' + this.description = `Get variable that was saved using Set Variable node` + this.baseClasses = [this.type, 'Utilities'] + this.inputs = [ + { + label: 'Variable Name', + name: 'variableName', + type: 'string', + placeholder: 'var1' + } + ] + this.outputs = [ + { + label: 'Output', + name: 'output', + baseClasses: ['string', 'number', 'boolean', 'json', 'array'] + } + ] + } + + async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { + const variableName = nodeData.inputs?.variableName as string + const dynamicVars = options.dynamicVariables as Record + + if (Object.prototype.hasOwnProperty.call(dynamicVars, variableName)) { + return dynamicVars[variableName] + } + return undefined + } +} + +module.exports = { nodeClass: GetVariable_Utilities } diff --git a/packages/components/nodes/utilities/GetVariable/getvar.svg b/packages/components/nodes/utilities/GetVariable/getvar.svg new file mode 100644 index 000000000..49e27ab13 --- /dev/null +++ 
b/packages/components/nodes/utilities/GetVariable/getvar.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/packages/components/nodes/utilities/SetVariable/SetVariable.ts b/packages/components/nodes/utilities/SetVariable/SetVariable.ts new file mode 100644 index 000000000..8542668ca --- /dev/null +++ b/packages/components/nodes/utilities/SetVariable/SetVariable.ts @@ -0,0 +1,56 @@ +import { INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface' + +class SetVariable_Utilities implements INode { + label: string + name: string + version: number + description: string + type: string + icon: string + category: string + baseClasses: string[] + inputs: INodeParams[] + outputs: INodeOutputsValue[] + + constructor() { + this.label = 'Set Variable' + this.name = 'setVariable' + this.version = 1.0 + this.type = 'SetVariable' + this.icon = 'setvar.svg' + this.category = 'Utilities' + this.description = `Set variable which can be retrieved at a later stage. Variable is only available during runtime.` + this.baseClasses = [this.type, 'Utilities'] + this.inputs = [ + { + label: 'Input', + name: 'input', + type: 'string | number | boolean | json | array', + optional: true, + list: true + }, + { + label: 'Variable Name', + name: 'variableName', + type: 'string', + placeholder: 'var1' + } + ] + this.outputs = [ + { + label: 'Output', + name: 'output', + baseClasses: ['string', 'number', 'boolean', 'json', 'array'] + } + ] + } + + async init(nodeData: INodeData): Promise { + const inputRaw = nodeData.inputs?.input + const variableName = nodeData.inputs?.variableName as string + + return { output: inputRaw, dynamicVariables: { [variableName]: inputRaw } } + } +} + +module.exports = { nodeClass: SetVariable_Utilities } diff --git a/packages/components/nodes/utilities/SetVariable/setvar.svg b/packages/components/nodes/utilities/SetVariable/setvar.svg new file mode 100644 index 000000000..c8d643c9b --- /dev/null +++ b/packages/components/nodes/utilities/SetVariable/setvar.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/packages/components/nodes/vectorstores/Elasticsearch/Elasticsearch.ts b/packages/components/nodes/vectorstores/Elasticsearch/Elasticsearch.ts index 5f3cf2066..04c90c6b0 100644 --- a/packages/components/nodes/vectorstores/Elasticsearch/Elasticsearch.ts +++ b/packages/components/nodes/vectorstores/Elasticsearch/Elasticsearch.ts @@ -183,13 +183,26 @@ const prepareConnectionOptions = ( } else if (cloudId) { let username = getCredentialParam('username', credentialData, nodeData) let password = getCredentialParam('password', credentialData, nodeData) - elasticSearchClientOptions = { - cloud: { - id: cloudId - }, - auth: { - username: username, - password: password + if (cloudId.startsWith('http')) { + elasticSearchClientOptions = { + node: cloudId, + auth: { + username: username, + password: password + }, + tls: { + rejectUnauthorized: false + } + } + } else { + elasticSearchClientOptions = { + cloud: { + id: cloudId + }, + auth: { + username: username, + password: password + } } } } diff --git a/packages/components/package.json b/packages/components/package.json index 1874ca104..52e59b41e 100644 --- a/packages/components/package.json +++ b/packages/components/package.json @@ -26,6 +26,8 @@ "@gomomento/sdk-core": "^1.51.1", "@google-ai/generativelanguage": "^0.2.1", "@huggingface/inference": "^2.6.1", + "@langchain/google-genai": "^0.0.3", + "@langchain/mistralai": "^0.0.3", "@notionhq/client": "^2.2.8", "@opensearch-project/opensearch": "^1.2.0", 
"@pinecone-database/pinecone": "^1.1.1", diff --git a/packages/components/src/utils.ts b/packages/components/src/utils.ts index 404f7c75d..239b13ca8 100644 --- a/packages/components/src/utils.ts +++ b/packages/components/src/utils.ts @@ -12,6 +12,63 @@ import { AIMessage, HumanMessage } from 'langchain/schema' export const numberOrExpressionRegex = '^(\\d+\\.?\\d*|{{.*}})$' //return true if string consists only numbers OR expression {{}} export const notEmptyRegex = '(.|\\s)*\\S(.|\\s)*' //return true if string is not empty or blank +/* + * List of dependencies allowed to be import in vm2 + */ +export const availableDependencies = [ + '@aws-sdk/client-bedrock-runtime', + '@aws-sdk/client-dynamodb', + '@aws-sdk/client-s3', + '@elastic/elasticsearch', + '@dqbd/tiktoken', + '@getzep/zep-js', + '@gomomento/sdk', + '@gomomento/sdk-core', + '@google-ai/generativelanguage', + '@huggingface/inference', + '@notionhq/client', + '@opensearch-project/opensearch', + '@pinecone-database/pinecone', + '@qdrant/js-client-rest', + '@supabase/supabase-js', + '@upstash/redis', + '@zilliz/milvus2-sdk-node', + 'apify-client', + 'axios', + 'cheerio', + 'chromadb', + 'cohere-ai', + 'd3-dsv', + 'faiss-node', + 'form-data', + 'google-auth-library', + 'graphql', + 'html-to-text', + 'ioredis', + 'langchain', + 'langfuse', + 'langsmith', + 'linkifyjs', + 'llmonitor', + 'mammoth', + 'moment', + 'mongodb', + 'mysql2', + 'node-fetch', + 'node-html-markdown', + 'notion-to-md', + 'openai', + 'pdf-parse', + 'pdfjs-dist', + 'pg', + 'playwright', + 'puppeteer', + 'redis', + 'replicate', + 'srt-parser-2', + 'typeorm', + 'weaviate-ts-client' +] /** * Get base classes of components diff --git a/packages/server/marketplaces/chatflows/SQL Prompt.json b/packages/server/marketplaces/chatflows/SQL Prompt.json new file mode 100644 index 000000000..9244e8dec --- /dev/null +++ b/packages/server/marketplaces/chatflows/SQL Prompt.json @@ -0,0 +1,1237 @@ +{ + "description": "Manually construct prompts to query a SQL database", + "badge": "new", + "nodes": [ + { + "width": 300, + "height": 511, + "id": "promptTemplate_0", + "position": { + "x": 638.5481508577102, + "y": 84.0454315632386 + }, + "type": "customNode", + "data": { + "id": "promptTemplate_0", + "label": "Prompt Template", + "version": 1, + "name": "promptTemplate", + "type": "PromptTemplate", + "baseClasses": ["PromptTemplate", "BaseStringPromptTemplate", "BasePromptTemplate", "Runnable"], + "category": "Prompts", + "description": "Schema to represent a basic prompt for an LLM", + "inputParams": [ + { + "label": "Template", + "name": "template", + "type": "string", + "rows": 4, + "placeholder": "What is a good name for a company that makes {product}?", + "id": "promptTemplate_0-input-template-string" + }, + { + "label": "Format Prompt Values", + "name": "promptValues", + "type": "json", + "optional": true, + "acceptVariable": true, + "list": true, + "id": "promptTemplate_0-input-promptValues-json" + } + ], + "inputAnchors": [], + "inputs": { + "template": "Based on the provided SQL table schema and question below, return a SQL SELECT ALL query that would answer the user's question. 
For example: SELECT * FROM table WHERE id = '1'.\n------------\nSCHEMA: {schema}\n------------\nQUESTION: {question}\n------------\nSQL QUERY:", + "promptValues": "{\"schema\":\"{{setVariable_0.data.instance}}\",\"question\":\"{{question}}\"}" + }, + "outputAnchors": [ + { + "id": "promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate|Runnable", + "name": "promptTemplate", + "label": "PromptTemplate", + "type": "PromptTemplate | BaseStringPromptTemplate | BasePromptTemplate | Runnable" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 638.5481508577102, + "y": 84.0454315632386 + }, + "dragging": false + }, + { + "width": 300, + "height": 507, + "id": "llmChain_0", + "position": { + "x": 1095.1973126620626, + "y": -83.98379829183628 + }, + "type": "customNode", + "data": { + "id": "llmChain_0", + "label": "LLM Chain", + "version": 3, + "name": "llmChain", + "type": "LLMChain", + "baseClasses": ["LLMChain", "BaseChain", "Runnable"], + "category": "Chains", + "description": "Chain to run queries against LLMs", + "inputParams": [ + { + "label": "Chain Name", + "name": "chainName", + "type": "string", + "placeholder": "Name Your Chain", + "optional": true, + "id": "llmChain_0-input-chainName-string" + } + ], + "inputAnchors": [ + { + "label": "Language Model", + "name": "model", + "type": "BaseLanguageModel", + "id": "llmChain_0-input-model-BaseLanguageModel" + }, + { + "label": "Prompt", + "name": "prompt", + "type": "BasePromptTemplate", + "id": "llmChain_0-input-prompt-BasePromptTemplate" + }, + { + "label": "Output Parser", + "name": "outputParser", + "type": "BaseLLMOutputParser", + "optional": true, + "id": "llmChain_0-input-outputParser-BaseLLMOutputParser" + }, + { + "label": "Input Moderation", + "description": "Detect text that could generate harmful output and prevent it from being sent to the language model", + "name": "inputModeration", + "type": "Moderation", + "optional": true, + "list": true, + "id": "llmChain_0-input-inputModeration-Moderation" + } + ], + "inputs": { + "model": "{{chatOpenAI_0.data.instance}}", + "prompt": "{{promptTemplate_0.data.instance}}", + "outputParser": "", + "inputModeration": "", + "chainName": "SQL Query Chain" + }, + "outputAnchors": [ + { + "name": "output", + "label": "Output", + "type": "options", + "options": [ + { + "id": "llmChain_0-output-llmChain-LLMChain|BaseChain|Runnable", + "name": "llmChain", + "label": "LLM Chain", + "type": "LLMChain | BaseChain | Runnable" + }, + { + "id": "llmChain_0-output-outputPrediction-string|json", + "name": "outputPrediction", + "label": "Output Prediction", + "type": "string | json" + } + ], + "default": "llmChain" + } + ], + "outputs": { + "output": "outputPrediction" + }, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1095.1973126620626, + "y": -83.98379829183628 + }, + "dragging": false + }, + { + "width": 300, + "height": 574, + "id": "chatOpenAI_0", + "position": { + "x": 636.5762708317321, + "y": -543.3151550847003 + }, + "type": "customNode", + "data": { + "id": "chatOpenAI_0", + "label": "ChatOpenAI", + "version": 2, + "name": "chatOpenAI", + "type": "ChatOpenAI", + "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel", "Runnable"], + "category": "Chat Models", + "description": "Wrapper around OpenAI large language models that use the Chat endpoint", + "inputParams": [ + { + "label": "Connect Credential", + "name": "credential", + "type": "credential", + 
"credentialNames": ["openAIApi"], + "id": "chatOpenAI_0-input-credential-credential" + }, + { + "label": "Model Name", + "name": "modelName", + "type": "options", + "options": [ + { + "label": "gpt-4", + "name": "gpt-4" + }, + { + "label": "gpt-4-1106-preview", + "name": "gpt-4-1106-preview" + }, + { + "label": "gpt-4-vision-preview", + "name": "gpt-4-vision-preview" + }, + { + "label": "gpt-4-0613", + "name": "gpt-4-0613" + }, + { + "label": "gpt-4-32k", + "name": "gpt-4-32k" + }, + { + "label": "gpt-4-32k-0613", + "name": "gpt-4-32k-0613" + }, + { + "label": "gpt-3.5-turbo", + "name": "gpt-3.5-turbo" + }, + { + "label": "gpt-3.5-turbo-1106", + "name": "gpt-3.5-turbo-1106" + }, + { + "label": "gpt-3.5-turbo-0613", + "name": "gpt-3.5-turbo-0613" + }, + { + "label": "gpt-3.5-turbo-16k", + "name": "gpt-3.5-turbo-16k" + }, + { + "label": "gpt-3.5-turbo-16k-0613", + "name": "gpt-3.5-turbo-16k-0613" + } + ], + "default": "gpt-3.5-turbo", + "optional": true, + "id": "chatOpenAI_0-input-modelName-options" + }, + { + "label": "Temperature", + "name": "temperature", + "type": "number", + "step": 0.1, + "default": 0.9, + "optional": true, + "id": "chatOpenAI_0-input-temperature-number" + }, + { + "label": "Max Tokens", + "name": "maxTokens", + "type": "number", + "step": 1, + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-maxTokens-number" + }, + { + "label": "Top Probability", + "name": "topP", + "type": "number", + "step": 0.1, + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-topP-number" + }, + { + "label": "Frequency Penalty", + "name": "frequencyPenalty", + "type": "number", + "step": 0.1, + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-frequencyPenalty-number" + }, + { + "label": "Presence Penalty", + "name": "presencePenalty", + "type": "number", + "step": 0.1, + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-presencePenalty-number" + }, + { + "label": "Timeout", + "name": "timeout", + "type": "number", + "step": 1, + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-timeout-number" + }, + { + "label": "BasePath", + "name": "basepath", + "type": "string", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-basepath-string" + }, + { + "label": "BaseOptions", + "name": "baseOptions", + "type": "json", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-baseOptions-json" + } + ], + "inputAnchors": [ + { + "label": "Cache", + "name": "cache", + "type": "BaseCache", + "optional": true, + "id": "chatOpenAI_0-input-cache-BaseCache" + } + ], + "inputs": { + "cache": "", + "modelName": "gpt-3.5-turbo-16k", + "temperature": "0", + "maxTokens": "", + "topP": "", + "frequencyPenalty": "", + "presencePenalty": "", + "timeout": "", + "basepath": "", + "baseOptions": "" + }, + "outputAnchors": [ + { + "id": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", + "name": "chatOpenAI", + "label": "ChatOpenAI", + "type": "ChatOpenAI | BaseChatModel | BaseLanguageModel | Runnable" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 636.5762708317321, + "y": -543.3151550847003 + }, + "dragging": false + }, + { + "width": 300, + "height": 574, + "id": "chatOpenAI_1", + "position": { + "x": 2636.1598769864936, + "y": -653.0025971757484 + }, + "type": "customNode", + "data": { + "id": "chatOpenAI_1", + "label": "ChatOpenAI", + "version": 2, + "name": 
"chatOpenAI", + "type": "ChatOpenAI", + "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel", "Runnable"], + "category": "Chat Models", + "description": "Wrapper around OpenAI large language models that use the Chat endpoint", + "inputParams": [ + { + "label": "Connect Credential", + "name": "credential", + "type": "credential", + "credentialNames": ["openAIApi"], + "id": "chatOpenAI_1-input-credential-credential" + }, + { + "label": "Model Name", + "name": "modelName", + "type": "options", + "options": [ + { + "label": "gpt-4", + "name": "gpt-4" + }, + { + "label": "gpt-4-1106-preview", + "name": "gpt-4-1106-preview" + }, + { + "label": "gpt-4-vision-preview", + "name": "gpt-4-vision-preview" + }, + { + "label": "gpt-4-0613", + "name": "gpt-4-0613" + }, + { + "label": "gpt-4-32k", + "name": "gpt-4-32k" + }, + { + "label": "gpt-4-32k-0613", + "name": "gpt-4-32k-0613" + }, + { + "label": "gpt-3.5-turbo", + "name": "gpt-3.5-turbo" + }, + { + "label": "gpt-3.5-turbo-1106", + "name": "gpt-3.5-turbo-1106" + }, + { + "label": "gpt-3.5-turbo-0613", + "name": "gpt-3.5-turbo-0613" + }, + { + "label": "gpt-3.5-turbo-16k", + "name": "gpt-3.5-turbo-16k" + }, + { + "label": "gpt-3.5-turbo-16k-0613", + "name": "gpt-3.5-turbo-16k-0613" + } + ], + "default": "gpt-3.5-turbo", + "optional": true, + "id": "chatOpenAI_1-input-modelName-options" + }, + { + "label": "Temperature", + "name": "temperature", + "type": "number", + "step": 0.1, + "default": 0.9, + "optional": true, + "id": "chatOpenAI_1-input-temperature-number" + }, + { + "label": "Max Tokens", + "name": "maxTokens", + "type": "number", + "step": 1, + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_1-input-maxTokens-number" + }, + { + "label": "Top Probability", + "name": "topP", + "type": "number", + "step": 0.1, + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_1-input-topP-number" + }, + { + "label": "Frequency Penalty", + "name": "frequencyPenalty", + "type": "number", + "step": 0.1, + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_1-input-frequencyPenalty-number" + }, + { + "label": "Presence Penalty", + "name": "presencePenalty", + "type": "number", + "step": 0.1, + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_1-input-presencePenalty-number" + }, + { + "label": "Timeout", + "name": "timeout", + "type": "number", + "step": 1, + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_1-input-timeout-number" + }, + { + "label": "BasePath", + "name": "basepath", + "type": "string", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_1-input-basepath-string" + }, + { + "label": "BaseOptions", + "name": "baseOptions", + "type": "json", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_1-input-baseOptions-json" + } + ], + "inputAnchors": [ + { + "label": "Cache", + "name": "cache", + "type": "BaseCache", + "optional": true, + "id": "chatOpenAI_1-input-cache-BaseCache" + } + ], + "inputs": { + "cache": "", + "modelName": "gpt-3.5-turbo-16k", + "temperature": "0", + "maxTokens": "", + "topP": "", + "frequencyPenalty": "", + "presencePenalty": "", + "timeout": "", + "basepath": "", + "baseOptions": "" + }, + "outputAnchors": [ + { + "id": "chatOpenAI_1-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", + "name": "chatOpenAI", + "label": "ChatOpenAI", + "type": "ChatOpenAI | BaseChatModel | BaseLanguageModel | Runnable" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + 
"positionAbsolute": { + "x": 2636.1598769864936, + "y": -653.0025971757484 + }, + "dragging": false + }, + { + "width": 300, + "height": 507, + "id": "llmChain_1", + "position": { + "x": 3089.9937691022837, + "y": -109.24001734925716 + }, + "type": "customNode", + "data": { + "id": "llmChain_1", + "label": "LLM Chain", + "version": 3, + "name": "llmChain", + "type": "LLMChain", + "baseClasses": ["LLMChain", "BaseChain", "Runnable"], + "category": "Chains", + "description": "Chain to run queries against LLMs", + "inputParams": [ + { + "label": "Chain Name", + "name": "chainName", + "type": "string", + "placeholder": "Name Your Chain", + "optional": true, + "id": "llmChain_1-input-chainName-string" + } + ], + "inputAnchors": [ + { + "label": "Language Model", + "name": "model", + "type": "BaseLanguageModel", + "id": "llmChain_1-input-model-BaseLanguageModel" + }, + { + "label": "Prompt", + "name": "prompt", + "type": "BasePromptTemplate", + "id": "llmChain_1-input-prompt-BasePromptTemplate" + }, + { + "label": "Output Parser", + "name": "outputParser", + "type": "BaseLLMOutputParser", + "optional": true, + "id": "llmChain_1-input-outputParser-BaseLLMOutputParser" + }, + { + "label": "Input Moderation", + "description": "Detect text that could generate harmful output and prevent it from being sent to the language model", + "name": "inputModeration", + "type": "Moderation", + "optional": true, + "list": true, + "id": "llmChain_1-input-inputModeration-Moderation" + } + ], + "inputs": { + "model": "{{chatOpenAI_1.data.instance}}", + "prompt": "{{promptTemplate_1.data.instance}}", + "outputParser": "", + "inputModeration": "", + "chainName": "Final Chain" + }, + "outputAnchors": [ + { + "name": "output", + "label": "Output", + "type": "options", + "options": [ + { + "id": "llmChain_1-output-llmChain-LLMChain|BaseChain|Runnable", + "name": "llmChain", + "label": "LLM Chain", + "type": "LLMChain | BaseChain | Runnable" + }, + { + "id": "llmChain_1-output-outputPrediction-string|json", + "name": "outputPrediction", + "label": "Output Prediction", + "type": "string | json" + } + ], + "default": "llmChain" + } + ], + "outputs": { + "output": "llmChain" + }, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 3089.9937691022837, + "y": -109.24001734925716 + }, + "dragging": false + }, + { + "width": 300, + "height": 669, + "id": "customFunction_2", + "position": { + "x": -152.63957160907668, + "y": -212.74538890862547 + }, + "type": "customNode", + "data": { + "id": "customFunction_2", + "label": "Custom JS Function", + "version": 1, + "name": "customFunction", + "type": "CustomFunction", + "baseClasses": ["CustomFunction", "Utilities"], + "category": "Utilities", + "description": "Execute custom javascript function", + "inputParams": [ + { + "label": "Input Variables", + "name": "functionInputVariables", + "description": "Input variables can be used in the function with prefix $. 
For example: $var", + "type": "json", + "optional": true, + "acceptVariable": true, + "list": true, + "id": "customFunction_2-input-functionInputVariables-json" + }, + { + "label": "Function Name", + "name": "functionName", + "type": "string", + "placeholder": "My Function", + "id": "customFunction_2-input-functionName-string" + }, + { + "label": "Javascript Function", + "name": "javascriptFunction", + "type": "code", + "id": "customFunction_2-input-javascriptFunction-code" + } + ], + "inputAnchors": [], + "inputs": { + "functionInputVariables": "", + "functionName": "Get SQL Schema Prompt", + "javascriptFunction": "const HOST = 'singlestore-host';\nconst USER = 'admin';\nconst PASSWORD = 'mypassword';\nconst DATABASE = 'mydb';\nconst TABLE = 'samples';\nconst mysql = require('mysql2/promise');\n\nlet sqlSchemaPrompt;\n\n/**\n * Ideal prompt contains schema info and examples\n * Follows best practices as specified form https://arxiv.org/abs/2204.00498\n * =========================================\n * CREATE TABLE samples (firstName varchar NOT NULL, lastName varchar)\n * SELECT * FROM samples LIMIT 3\n * firstName lastName\n * Stephen Tyler\n * Jack McGinnis\n * Steven Repici\n * =========================================\n*/\nfunction getSQLPrompt() {\n return new Promise(async (resolve, reject) => {\n \n const singleStoreConnection = mysql.createPool({\n host: HOST,\n user: USER,\n password: PASSWORD,\n database: DATABASE,\n });\n\n // Get schema info\n const [schemaInfo] = await singleStoreConnection.execute(\n `SELECT * FROM INFORMATION_SCHEMA.COLUMNS WHERE table_name = \"${TABLE}\"`\n );\n\n const createColumns = [];\n const columnNames = [];\n\n for (const schemaData of schemaInfo) {\n columnNames.push(`${schemaData['COLUMN_NAME']}`);\n createColumns.push(`${schemaData['COLUMN_NAME']} ${schemaData['COLUMN_TYPE']} ${schemaData['IS_NULLABLE'] === 'NO' ? 
'NOT NULL' : ''}`);\n }\n\n const sqlCreateTableQuery = `CREATE TABLE samples (${createColumns.join(', ')})`;\n const sqlSelectTableQuery = `SELECT * FROM samples LIMIT 3`;\n\n // Get first 3 rows\n const [rows] = await singleStoreConnection.execute(\n sqlSelectTableQuery,\n );\n \n const allValues = [];\n for (const row of rows) {\n const rowValues = [];\n for (const colName in row) {\n rowValues.push(row[colName]);\n }\n allValues.push(rowValues.join(' '));\n }\n\n sqlSchemaPrompt = sqlCreateTableQuery + '\\n' + sqlSelectTableQuery + '\\n' + columnNames.join(' ') + '\\n' + allValues.join('\\n');\n \n resolve();\n });\n}\n\nasync function main() {\n await getSQLPrompt();\n}\n\nawait main();\n\nreturn sqlSchemaPrompt;" + }, + "outputAnchors": [ + { + "name": "output", + "label": "Output", + "type": "options", + "options": [ + { + "id": "customFunction_2-output-output-string|number|boolean|json|array", + "name": "output", + "label": "Output", + "type": "string | number | boolean | json | array" + } + ], + "default": "output" + } + ], + "outputs": { + "output": "output" + }, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": -152.63957160907668, + "y": -212.74538890862547 + }, + "dragging": false + }, + { + "width": 300, + "height": 669, + "id": "customFunction_1", + "position": { + "x": 1887.4670208331604, + "y": -275.95340782935716 + }, + "type": "customNode", + "data": { + "id": "customFunction_1", + "label": "Custom JS Function", + "version": 1, + "name": "customFunction", + "type": "CustomFunction", + "baseClasses": ["CustomFunction", "Utilities"], + "category": "Utilities", + "description": "Execute custom javascript function", + "inputParams": [ + { + "label": "Input Variables", + "name": "functionInputVariables", + "description": "Input variables can be used in the function with prefix $. 
For example: $var", + "type": "json", + "optional": true, + "acceptVariable": true, + "list": true, + "id": "customFunction_1-input-functionInputVariables-json" + }, + { + "label": "Function Name", + "name": "functionName", + "type": "string", + "placeholder": "My Function", + "id": "customFunction_1-input-functionName-string" + }, + { + "label": "Javascript Function", + "name": "javascriptFunction", + "type": "code", + "id": "customFunction_1-input-javascriptFunction-code" + } + ], + "inputAnchors": [], + "inputs": { + "functionInputVariables": "{\"sqlQuery\":\"{{setVariable_1.data.instance}}\"}", + "functionName": "Run SQL Query", + "javascriptFunction": "const HOST = 'singlestore-host';\nconst USER = 'admin';\nconst PASSWORD = 'mypassword';\nconst DATABASE = 'mydb';\nconst TABLE = 'samples';\nconst mysql = require('mysql2/promise');\n\nlet result;\n\nfunction getSQLResult() {\n return new Promise(async (resolve, reject) => {\n \n const singleStoreConnection = mysql.createPool({\n host: HOST,\n user: USER,\n password: PASSWORD,\n database: DATABASE,\n });\n \n const [rows] = await singleStoreConnection.execute(\n $sqlQuery\n );\n\n result = JSON.stringify(rows)\n \n resolve();\n });\n}\n\nasync function main() {\n await getSQLResult();\n}\n\nawait main();\n\nreturn result;" + }, + "outputAnchors": [ + { + "name": "output", + "label": "Output", + "type": "options", + "options": [ + { + "id": "customFunction_1-output-output-string|number|boolean|json|array", + "name": "output", + "label": "Output", + "type": "string | number | boolean | json | array" + } + ], + "default": "output" + } + ], + "outputs": { + "output": "output" + }, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1887.4670208331604, + "y": -275.95340782935716 + }, + "dragging": false + }, + { + "width": 300, + "height": 511, + "id": "promptTemplate_1", + "position": { + "x": 2638.3935631956588, + "y": -18.55855423639423 + }, + "type": "customNode", + "data": { + "id": "promptTemplate_1", + "label": "Prompt Template", + "version": 1, + "name": "promptTemplate", + "type": "PromptTemplate", + "baseClasses": ["PromptTemplate", "BaseStringPromptTemplate", "BasePromptTemplate", "Runnable"], + "category": "Prompts", + "description": "Schema to represent a basic prompt for an LLM", + "inputParams": [ + { + "label": "Template", + "name": "template", + "type": "string", + "rows": 4, + "placeholder": "What is a good name for a company that makes {product}?", + "id": "promptTemplate_1-input-template-string" + }, + { + "label": "Format Prompt Values", + "name": "promptValues", + "type": "json", + "optional": true, + "acceptVariable": true, + "list": true, + "id": "promptTemplate_1-input-promptValues-json" + } + ], + "inputAnchors": [], + "inputs": { + "template": "Based on the table schema below, question, SQL query, and SQL response, write a natural language response, be details as possible:\n------------\nSCHEMA: {schema}\n------------\nQUESTION: {question}\n------------\nSQL QUERY: {sqlQuery}\n------------\nSQL RESPONSE: {sqlResponse}\n------------\nNATURAL LANGUAGE RESPONSE:", + "promptValues": "{\"schema\":\"{{getVariable_0.data.instance}}\",\"question\":\"{{question}}\",\"sqlResponse\":\"{{customFunction_1.data.instance}}\",\"sqlQuery\":\"{{getVariable_1.data.instance}}\"}" + }, + "outputAnchors": [ + { + "id": "promptTemplate_1-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate|Runnable", + "name": "promptTemplate", + "label": "PromptTemplate", + "type": "PromptTemplate | 
BaseStringPromptTemplate | BasePromptTemplate | Runnable" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "dragging": false, + "positionAbsolute": { + "x": 2638.3935631956588, + "y": -18.55855423639423 + } + }, + { + "width": 300, + "height": 355, + "id": "setVariable_0", + "position": { + "x": 247.02296459986826, + "y": -60.27462140472403 + }, + "type": "customNode", + "data": { + "id": "setVariable_0", + "label": "Set Variable", + "version": 1, + "name": "setVariable", + "type": "SetVariable", + "baseClasses": ["SetVariable", "Utilities"], + "category": "Utilities", + "description": "Set variable which can be retrieved at a later stage. Variable is only available during runtime.", + "inputParams": [ + { + "label": "Variable Name", + "name": "variableName", + "type": "string", + "placeholder": "var1", + "id": "setVariable_0-input-variableName-string" + } + ], + "inputAnchors": [ + { + "label": "Input", + "name": "input", + "type": "string | number | boolean | json | array", + "optional": true, + "list": true, + "id": "setVariable_0-input-input-string | number | boolean | json | array" + } + ], + "inputs": { + "input": ["{{customFunction_2.data.instance}}"], + "variableName": "schemaPrompt" + }, + "outputAnchors": [ + { + "name": "output", + "label": "Output", + "type": "options", + "options": [ + { + "id": "setVariable_0-output-output-string|number|boolean|json|array", + "name": "output", + "label": "Output", + "type": "string | number | boolean | json | array" + } + ], + "default": "output" + } + ], + "outputs": { + "output": "output" + }, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 247.02296459986826, + "y": -60.27462140472403 + }, + "dragging": false + }, + { + "width": 300, + "height": 304, + "id": "getVariable_0", + "position": { + "x": 2248.4540716891547, + "y": -47.21232652005119 + }, + "type": "customNode", + "data": { + "id": "getVariable_0", + "label": "Get Variable", + "version": 1, + "name": "getVariable", + "type": "GetVariable", + "baseClasses": ["GetVariable", "Utilities"], + "category": "Utilities", + "description": "Get variable that was saved using Set Variable node", + "inputParams": [ + { + "label": "Variable Name", + "name": "variableName", + "type": "string", + "placeholder": "var1", + "id": "getVariable_0-input-variableName-string" + } + ], + "inputAnchors": [], + "inputs": { + "variableName": "schemaPrompt" + }, + "outputAnchors": [ + { + "name": "output", + "label": "Output", + "type": "options", + "options": [ + { + "id": "getVariable_0-output-output-string|number|boolean|json|array", + "name": "output", + "label": "Output", + "type": "string | number | boolean | json | array" + } + ], + "default": "output" + } + ], + "outputs": { + "output": "output" + }, + "selected": false + }, + "positionAbsolute": { + "x": 2248.4540716891547, + "y": -47.21232652005119 + }, + "selected": false, + "dragging": false + }, + { + "width": 300, + "height": 304, + "id": "getVariable_1", + "position": { + "x": 2256.0258940322105, + "y": 437.4363694364632 + }, + "type": "customNode", + "data": { + "id": "getVariable_1", + "label": "Get Variable", + "version": 1, + "name": "getVariable", + "type": "GetVariable", + "baseClasses": ["GetVariable", "Utilities"], + "category": "Utilities", + "description": "Get variable that was saved using Set Variable node", + "inputParams": [ + { + "label": "Variable Name", + "name": "variableName", + "type": "string", + "placeholder": "var1", + "id": "getVariable_1-input-variableName-string" + } 
+ ], + "inputAnchors": [], + "inputs": { + "variableName": "sqlQuery" + }, + "outputAnchors": [ + { + "name": "output", + "label": "Output", + "type": "options", + "options": [ + { + "id": "getVariable_1-output-output-string|number|boolean|json|array", + "name": "output", + "label": "Output", + "type": "string | number | boolean | json | array" + } + ], + "default": "output" + } + ], + "outputs": { + "output": "output" + }, + "selected": false + }, + "positionAbsolute": { + "x": 2256.0258940322105, + "y": 437.4363694364632 + }, + "selected": false, + "dragging": false + }, + { + "width": 300, + "height": 355, + "id": "setVariable_1", + "position": { + "x": 1482.8091395089693, + "y": -33.943355212355016 + }, + "type": "customNode", + "data": { + "id": "setVariable_1", + "label": "Set Variable", + "version": 1, + "name": "setVariable", + "type": "SetVariable", + "baseClasses": ["SetVariable", "Utilities"], + "category": "Utilities", + "description": "Set variable which can be retrieved at a later stage. Variable is only available during runtime.", + "inputParams": [ + { + "label": "Variable Name", + "name": "variableName", + "type": "string", + "placeholder": "var1", + "id": "setVariable_1-input-variableName-string" + } + ], + "inputAnchors": [ + { + "label": "Input", + "name": "input", + "type": "string | number | boolean | json | array", + "optional": true, + "list": true, + "id": "setVariable_1-input-input-string | number | boolean | json | array" + } + ], + "inputs": { + "input": ["{{llmChain_0.data.instance}}"], + "variableName": "sqlQuery" + }, + "outputAnchors": [ + { + "name": "output", + "label": "Output", + "type": "options", + "options": [ + { + "id": "setVariable_1-output-output-string|number|boolean|json|array", + "name": "output", + "label": "Output", + "type": "string | number | boolean | json | array" + } + ], + "default": "output" + } + ], + "outputs": { + "output": "output" + }, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1482.8091395089693, + "y": -33.943355212355016 + }, + "dragging": false + } + ], + "edges": [ + { + "source": "promptTemplate_0", + "sourceHandle": "promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate|Runnable", + "target": "llmChain_0", + "targetHandle": "llmChain_0-input-prompt-BasePromptTemplate", + "type": "buttonedge", + "id": "promptTemplate_0-promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate|Runnable-llmChain_0-llmChain_0-input-prompt-BasePromptTemplate", + "data": { + "label": "" + } + }, + { + "source": "chatOpenAI_0", + "sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", + "target": "llmChain_0", + "targetHandle": "llmChain_0-input-model-BaseLanguageModel", + "type": "buttonedge", + "id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable-llmChain_0-llmChain_0-input-model-BaseLanguageModel", + "data": { + "label": "" + } + }, + { + "source": "chatOpenAI_1", + "sourceHandle": "chatOpenAI_1-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", + "target": "llmChain_1", + "targetHandle": "llmChain_1-input-model-BaseLanguageModel", + "type": "buttonedge", + "id": "chatOpenAI_1-chatOpenAI_1-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable-llmChain_1-llmChain_1-input-model-BaseLanguageModel", + "data": { + "label": "" + } + }, + { + "source": "customFunction_1", + "sourceHandle": 
"customFunction_1-output-output-string|number|boolean|json|array", + "target": "promptTemplate_1", + "targetHandle": "promptTemplate_1-input-promptValues-json", + "type": "buttonedge", + "id": "customFunction_1-customFunction_1-output-output-string|number|boolean|json|array-promptTemplate_1-promptTemplate_1-input-promptValues-json", + "data": { + "label": "" + } + }, + { + "source": "promptTemplate_1", + "sourceHandle": "promptTemplate_1-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate|Runnable", + "target": "llmChain_1", + "targetHandle": "llmChain_1-input-prompt-BasePromptTemplate", + "type": "buttonedge", + "id": "promptTemplate_1-promptTemplate_1-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate|Runnable-llmChain_1-llmChain_1-input-prompt-BasePromptTemplate", + "data": { + "label": "" + } + }, + { + "source": "customFunction_2", + "sourceHandle": "customFunction_2-output-output-string|number|boolean|json|array", + "target": "setVariable_0", + "targetHandle": "setVariable_0-input-input-string | number | boolean | json | array", + "type": "buttonedge", + "id": "customFunction_2-customFunction_2-output-output-string|number|boolean|json|array-setVariable_0-setVariable_0-input-input-string | number | boolean | json | array", + "data": { + "label": "" + } + }, + { + "source": "setVariable_0", + "sourceHandle": "setVariable_0-output-output-string|number|boolean|json|array", + "target": "promptTemplate_0", + "targetHandle": "promptTemplate_0-input-promptValues-json", + "type": "buttonedge", + "id": "setVariable_0-setVariable_0-output-output-string|number|boolean|json|array-promptTemplate_0-promptTemplate_0-input-promptValues-json", + "data": { + "label": "" + } + }, + { + "source": "getVariable_0", + "sourceHandle": "getVariable_0-output-output-string|number|boolean|json|array", + "target": "promptTemplate_1", + "targetHandle": "promptTemplate_1-input-promptValues-json", + "type": "buttonedge", + "id": "getVariable_0-getVariable_0-output-output-string|number|boolean|json|array-promptTemplate_1-promptTemplate_1-input-promptValues-json", + "data": { + "label": "" + } + }, + { + "source": "getVariable_1", + "sourceHandle": "getVariable_1-output-output-string|number|boolean|json|array", + "target": "promptTemplate_1", + "targetHandle": "promptTemplate_1-input-promptValues-json", + "type": "buttonedge", + "id": "getVariable_1-getVariable_1-output-output-string|number|boolean|json|array-promptTemplate_1-promptTemplate_1-input-promptValues-json", + "data": { + "label": "" + } + }, + { + "source": "llmChain_0", + "sourceHandle": "llmChain_0-output-outputPrediction-string|json", + "target": "setVariable_1", + "targetHandle": "setVariable_1-input-input-string | number | boolean | json | array", + "type": "buttonedge", + "id": "llmChain_0-llmChain_0-output-outputPrediction-string|json-setVariable_1-setVariable_1-input-input-string | number | boolean | json | array", + "data": { + "label": "" + } + }, + { + "source": "setVariable_1", + "sourceHandle": "setVariable_1-output-output-string|number|boolean|json|array", + "target": "customFunction_1", + "targetHandle": "customFunction_1-input-functionInputVariables-json", + "type": "buttonedge", + "id": "setVariable_1-setVariable_1-output-output-string|number|boolean|json|array-customFunction_1-customFunction_1-input-functionInputVariables-json", + "data": { + "label": "" + } + } + ] +} diff --git a/packages/server/src/index.ts b/packages/server/src/index.ts index fb4a5f5a0..9c31a3337 100644 --- 
a/packages/server/src/index.ts +++ b/packages/server/src/index.ts @@ -55,7 +55,7 @@ import { Tool } from './database/entities/Tool' import { Assistant } from './database/entities/Assistant' import { ChatflowPool } from './ChatflowPool' import { CachePool } from './CachePool' -import { ICommonObject, IMessage, INodeOptionsValue } from 'flowise-components' +import { ICommonObject, IMessage, INodeOptionsValue, handleEscapeCharacters } from 'flowise-components' import { createRateLimiter, getRateLimiter, initializeRateLimiter } from './utils/rateLimit' import { addAPIKey, compareKeys, deleteAPIKey, getApiKey, getAPIKeys, updateAPIKey } from './utils/apiKey' import { sanitizeMiddleware } from './utils/XSS' @@ -281,6 +281,29 @@ export class App { } }) + // execute custom function node + this.app.post('/api/v1/node-custom-function', async (req: Request, res: Response) => { + const body = req.body + const nodeData = { inputs: body } + if (Object.prototype.hasOwnProperty.call(this.nodesPool.componentNodes, 'customFunction')) { + try { + const nodeInstanceFilePath = this.nodesPool.componentNodes['customFunction'].filePath as string + const nodeModule = await import(nodeInstanceFilePath) + const newNodeInstance = new nodeModule.nodeClass() + + const returnData = await newNodeInstance.init(nodeData) + const result = typeof returnData === 'string' ? handleEscapeCharacters(returnData, true) : returnData + + return res.json(result) + } catch (error) { + return res.status(500).send(`Error running custom function: ${error}`) + } + } else { + res.status(404).send(`Node customFunction not found`) + return + } + }) + // ---------------------------------------- // Chatflows // ---------------------------------------- diff --git a/packages/server/src/utils/index.ts b/packages/server/src/utils/index.ts index 2bf1c04a4..8fa9e8d03 100644 --- a/packages/server/src/utils/index.ts +++ b/packages/server/src/utils/index.ts @@ -231,6 +231,7 @@ export const buildLangchain = async ( // Create a Queue and add our initial node in it const nodeQueue = [] as INodeQueue[] const exploredNode = {} as IExploredNode + const dynamicVariables = {} as Record<string, unknown> // In the case of infinite loop, only max 3 loops will be executed const maxLoop = 3 @@ -267,20 +268,36 @@ export const buildLangchain = async ( appDataSource, databaseEntities, logger, - cachePool + cachePool, + dynamicVariables }) logger.debug(`[server]: Finished upserting ${reactFlowNode.data.label} (${reactFlowNode.data.id})`) break } else { logger.debug(`[server]: Initializing ${reactFlowNode.data.label} (${reactFlowNode.data.id})`) - flowNodes[nodeIndex].data.instance = await newNodeInstance.init(reactFlowNodeData, question, { + let outputResult = await newNodeInstance.init(reactFlowNodeData, question, { chatId, chatflowid, appDataSource, databaseEntities, logger, - cachePool + cachePool, + dynamicVariables }) + + // Save dynamic variables + if (reactFlowNode.data.name === 'setVariable') { + const dynamicVars = outputResult?.dynamicVariables ?? {} + + for (const variableKey in dynamicVars) { + dynamicVariables[variableKey] = dynamicVars[variableKey] + } + + outputResult = outputResult?.output + } + + flowNodes[nodeIndex].data.instance = outputResult + logger.debug(`[server]: Finished initializing ${reactFlowNode.data.label} (${reactFlowNode.data.id})`) } } catch (e: any) { @@ -711,6 +728,7 @@ export const findAvailableConfigs = (reactFlowNodes: IReactFlowNode[], component /** * Check to see if flow valid for stream + * TODO: perform check from component level.
i.e: set streaming on component, and check here * @param {IReactFlowNode[]} reactFlowNodes * @param {INodeData} endingNodeData * @returns {boolean} diff --git a/packages/ui/package.json b/packages/ui/package.json index 7a739978e..2aed7d977 100644 --- a/packages/ui/package.json +++ b/packages/ui/package.json @@ -8,13 +8,20 @@ "email": "henryheng@flowiseai.com" }, "dependencies": { + "@codemirror/lang-javascript": "^6.2.1", + "@codemirror/lang-json": "^6.0.1", + "@codemirror/view": "^6.22.3", "@emotion/cache": "^11.4.0", "@emotion/react": "^11.10.6", "@emotion/styled": "^11.10.6", "@mui/icons-material": "^5.0.3", - "@mui/material": "^5.11.12", + "@mui/lab": "^5.0.0-alpha.156", + "@mui/material": "^5.15.0", "@mui/x-data-grid": "^6.8.0", "@tabler/icons": "^1.39.1", + "@uiw/codemirror-theme-sublime": "^4.21.21", + "@uiw/codemirror-theme-vscode": "^4.21.21", + "@uiw/react-codemirror": "^4.21.21", "clsx": "^1.1.1", "flowise-embed": "*", "flowise-embed-react": "*", @@ -26,7 +33,6 @@ "lodash": "^4.17.21", "moment": "^2.29.3", "notistack": "^2.0.4", - "prismjs": "^1.28.0", "prop-types": "^15.7.2", "react": "^18.2.0", "react-code-blocks": "^0.0.9-0", @@ -39,7 +45,6 @@ "react-redux": "^8.0.5", "react-router": "~6.3.0", "react-router-dom": "~6.3.0", - "react-simple-code-editor": "^0.11.2", "react-syntax-highlighter": "^15.5.0", "reactflow": "^11.5.6", "redux": "^4.0.5", diff --git a/packages/ui/src/api/nodes.js b/packages/ui/src/api/nodes.js index 7eb4c3518..3b7eacc5e 100644 --- a/packages/ui/src/api/nodes.js +++ b/packages/ui/src/api/nodes.js @@ -4,7 +4,10 @@ const getAllNodes = () => client.get('/nodes') const getSpecificNode = (name) => client.get(`/nodes/${name}`) +const executeCustomFunctionNode = (body) => client.post(`/node-custom-function`, body) + export default { getAllNodes, - getSpecificNode + getSpecificNode, + executeCustomFunctionNode } diff --git a/packages/ui/src/ui-component/dialog/ExpandTextDialog.js b/packages/ui/src/ui-component/dialog/ExpandTextDialog.js index 2a4ec4f5a..0ef70e29e 100644 --- a/packages/ui/src/ui-component/dialog/ExpandTextDialog.js +++ b/packages/ui/src/ui-component/dialog/ExpandTextDialog.js @@ -2,14 +2,24 @@ import { createPortal } from 'react-dom' import { useState, useEffect } from 'react' import { useSelector, useDispatch } from 'react-redux' import PropTypes from 'prop-types' +import PerfectScrollbar from 'react-perfect-scrollbar' + +// MUI import { Button, Dialog, DialogActions, DialogContent, Typography } from '@mui/material' import { useTheme } from '@mui/material/styles' -import PerfectScrollbar from 'react-perfect-scrollbar' +import { LoadingButton } from '@mui/lab' + +// Project Import import { StyledButton } from 'ui-component/button/StyledButton' -import { DarkCodeEditor } from 'ui-component/editor/DarkCodeEditor' -import { LightCodeEditor } from 'ui-component/editor/LightCodeEditor' +import { CodeEditor } from 'ui-component/editor/CodeEditor' + +// Store import { HIDE_CANVAS_DIALOG, SHOW_CANVAS_DIALOG } from 'store/actions' +// API +import nodesApi from 'api/nodes' +import useApi from 'hooks/useApi' + import './ExpandTextDialog.css' const ExpandTextDialog = ({ show, dialogProps, onCancel, onConfirm }) => { @@ -18,18 +28,30 @@ const ExpandTextDialog = ({ show, dialogProps, onCancel, onConfirm }) => { const theme = useTheme() const dispatch = useDispatch() const customization = useSelector((state) => state.customization) - const languageType = 'json' const [inputValue, setInputValue] = useState('') const [inputParam, setInputParam] = useState(null) + 
const [languageType, setLanguageType] = useState('json') + const [loading, setLoading] = useState(false) + const [codeExecutedResult, setCodeExecutedResult] = useState('') + + const executeCustomFunctionNodeApi = useApi(nodesApi.executeCustomFunctionNode) useEffect(() => { if (dialogProps.value) setInputValue(dialogProps.value) - if (dialogProps.inputParam) setInputParam(dialogProps.inputParam) + if (dialogProps.inputParam) { + setInputParam(dialogProps.inputParam) + if (dialogProps.inputParam.type === 'code') { + setLanguageType('js') + } + } return () => { setInputValue('') + setLoading(false) setInputParam(null) + setLanguageType('json') + setCodeExecutedResult('') } }, [dialogProps]) @@ -39,11 +61,31 @@ const ExpandTextDialog = ({ show, dialogProps, onCancel, onConfirm }) => { return () => dispatch({ type: HIDE_CANVAS_DIALOG }) }, [show, dispatch]) + useEffect(() => { + setLoading(executeCustomFunctionNodeApi.loading) + }, [executeCustomFunctionNodeApi.loading]) + + useEffect(() => { + if (executeCustomFunctionNodeApi.data) { + setCodeExecutedResult(executeCustomFunctionNodeApi.data) + } + }, [executeCustomFunctionNodeApi.data]) + + useEffect(() => { + if (executeCustomFunctionNodeApi.error) { + if (typeof executeCustomFunctionNodeApi.error === 'object' && executeCustomFunctionNodeApi.error?.response?.data) { + setCodeExecutedResult(executeCustomFunctionNodeApi.error?.response?.data) + } else if (typeof executeCustomFunctionNodeApi.error === 'string') { + setCodeExecutedResult(executeCustomFunctionNodeApi.error) + } + } + }, [executeCustomFunctionNodeApi.error]) + const component = show ? (
- {inputParam && inputParam.type === 'string' && ( + {inputParam && (inputParam.type === 'string' || inputParam.type === 'code') && (
{inputParam.label} @@ -54,42 +96,66 @@ const ExpandTextDialog = ({ show, dialogProps, onCancel, onConfirm }) => { borderColor: theme.palette.grey['500'], borderRadius: '12px', height: '100%', - maxHeight: 'calc(100vh - 220px)', + maxHeight: languageType === 'js' ? 'calc(100vh - 250px)' : 'calc(100vh - 220px)', overflowX: 'hidden', backgroundColor: 'white' }} > - {customization.isDarkMode ? ( - setInputValue(code)} - placeholder={inputParam.placeholder} - type={languageType} - style={{ - fontSize: '0.875rem', - minHeight: 'calc(100vh - 220px)', - width: '100%' - }} - /> - ) : ( - setInputValue(code)} - placeholder={inputParam.placeholder} - type={languageType} - style={{ - fontSize: '0.875rem', - minHeight: 'calc(100vh - 220px)', - width: '100%' - }} - /> - )} + setInputValue(code)} + />
)}
+ {languageType === 'js' && ( + { + setLoading(true) + executeCustomFunctionNodeApi.request({ javascriptFunction: inputValue }) + }} + > + Execute + + )} + {codeExecutedResult && ( +
+ +
+ )}
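The Execute button wired up above goes through nodesApi.executeCustomFunctionNode, which posts to the new POST /api/v1/node-custom-function route registered in packages/server/src/index.ts earlier in this diff: the request body becomes the node's inputs, the customFunction component is instantiated and run, and string results are passed through handleEscapeCharacters before being returned as JSON. A minimal sketch of an equivalent direct call follows; the base URL and example function are illustrative assumptions, and the UI itself uses its axios client from api/nodes.js rather than raw fetch.

```js
// Sketch only: assumes a Flowise server reachable at BASE_URL (not part of this diff).
const BASE_URL = 'http://localhost:3000'

async function runCustomFunction(javascriptFunction) {
    const res = await fetch(`${BASE_URL}/api/v1/node-custom-function`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        // The server wraps this body as { inputs: body } and feeds it to the customFunction node
        body: JSON.stringify({ javascriptFunction })
    })
    // The route answers 404/500 with a plain-text message on failure
    if (!res.ok) throw new Error(await res.text())
    return res.json()
}

// Example: should resolve to 3 if the endpoint behaves as in the hunk above
runCustomFunction('return 1 + 2').then(console.log)
```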
diff --git a/packages/ui/src/ui-component/editor/CodeEditor.js b/packages/ui/src/ui-component/editor/CodeEditor.js new file mode 100644 index 000000000..120e19a01 --- /dev/null +++ b/packages/ui/src/ui-component/editor/CodeEditor.js @@ -0,0 +1,48 @@ +import PropTypes from 'prop-types' +import CodeMirror from '@uiw/react-codemirror' +import { javascript } from '@codemirror/lang-javascript' +import { json } from '@codemirror/lang-json' +import { vscodeDark } from '@uiw/codemirror-theme-vscode' +import { sublime } from '@uiw/codemirror-theme-sublime' +import { EditorView } from '@codemirror/view' + +export const CodeEditor = ({ value, height, theme, lang, placeholder, disabled = false, basicSetup = {}, onValueChange }) => { + const customStyle = EditorView.baseTheme({ + '&': { + color: '#191b1f', + padding: '10px' + }, + '.cm-placeholder': { + color: 'rgba(120, 120, 120, 0.5)' + } + }) + + return ( + + ) +} + +CodeEditor.propTypes = { + value: PropTypes.string, + height: PropTypes.string, + theme: PropTypes.string, + lang: PropTypes.string, + placeholder: PropTypes.string, + disabled: PropTypes.bool, + basicSetup: PropTypes.object, + onValueChange: PropTypes.func +} diff --git a/packages/ui/src/ui-component/editor/DarkCodeEditor.js b/packages/ui/src/ui-component/editor/DarkCodeEditor.js deleted file mode 100644 index bf0719dd9..000000000 --- a/packages/ui/src/ui-component/editor/DarkCodeEditor.js +++ /dev/null @@ -1,43 +0,0 @@ -import Editor from 'react-simple-code-editor' -import { highlight, languages } from 'prismjs/components/prism-core' -import 'prismjs/components/prism-clike' -import 'prismjs/components/prism-javascript' -import 'prismjs/components/prism-json' -import 'prismjs/components/prism-markup' -import './prism-dark.css' -import PropTypes from 'prop-types' -import { useTheme } from '@mui/material/styles' - -export const DarkCodeEditor = ({ value, placeholder, disabled = false, type, style, onValueChange, onMouseUp, onBlur }) => { - const theme = useTheme() - - return ( - highlight(code, type === 'json' ? languages.json : languages.js)} - padding={10} - onValueChange={onValueChange} - onMouseUp={onMouseUp} - onBlur={onBlur} - tabSize={4} - style={{ - ...style, - background: theme.palette.codeEditor.main - }} - textareaClassName='editor__textarea' - /> - ) -} - -DarkCodeEditor.propTypes = { - value: PropTypes.string, - placeholder: PropTypes.string, - disabled: PropTypes.bool, - type: PropTypes.string, - style: PropTypes.object, - onValueChange: PropTypes.func, - onMouseUp: PropTypes.func, - onBlur: PropTypes.func -} diff --git a/packages/ui/src/ui-component/editor/LightCodeEditor.js b/packages/ui/src/ui-component/editor/LightCodeEditor.js deleted file mode 100644 index 14dcbf29a..000000000 --- a/packages/ui/src/ui-component/editor/LightCodeEditor.js +++ /dev/null @@ -1,43 +0,0 @@ -import Editor from 'react-simple-code-editor' -import { highlight, languages } from 'prismjs/components/prism-core' -import 'prismjs/components/prism-clike' -import 'prismjs/components/prism-javascript' -import 'prismjs/components/prism-json' -import 'prismjs/components/prism-markup' -import './prism-light.css' -import PropTypes from 'prop-types' -import { useTheme } from '@mui/material/styles' - -export const LightCodeEditor = ({ value, placeholder, disabled = false, type, style, onValueChange, onMouseUp, onBlur }) => { - const theme = useTheme() - - return ( - highlight(code, type === 'json' ? 
languages.json : languages.js)} - padding={10} - onValueChange={onValueChange} - onMouseUp={onMouseUp} - onBlur={onBlur} - tabSize={4} - style={{ - ...style, - background: theme.palette.card.main - }} - textareaClassName='editor__textarea' - /> - ) -} - -LightCodeEditor.propTypes = { - value: PropTypes.string, - placeholder: PropTypes.string, - disabled: PropTypes.bool, - type: PropTypes.string, - style: PropTypes.object, - onValueChange: PropTypes.func, - onMouseUp: PropTypes.func, - onBlur: PropTypes.func -} diff --git a/packages/ui/src/ui-component/editor/prism-dark.css b/packages/ui/src/ui-component/editor/prism-dark.css deleted file mode 100644 index c4bfb4132..000000000 --- a/packages/ui/src/ui-component/editor/prism-dark.css +++ /dev/null @@ -1,275 +0,0 @@ -pre[class*='language-'], -code[class*='language-'] { - color: #d4d4d4; - font-size: 13px; - text-shadow: none; - font-family: Menlo, Monaco, Consolas, 'Andale Mono', 'Ubuntu Mono', 'Courier New', monospace; - direction: ltr; - text-align: left; - white-space: pre; - word-spacing: normal; - word-break: normal; - line-height: 1.5; - -moz-tab-size: 4; - -o-tab-size: 4; - tab-size: 4; - -webkit-hyphens: none; - -moz-hyphens: none; - -ms-hyphens: none; - hyphens: none; -} - -pre[class*='language-']::selection, -code[class*='language-']::selection, -pre[class*='language-'] *::selection, -code[class*='language-'] *::selection { - text-shadow: none; - background: #264f78; -} - -@media print { - pre[class*='language-'], - code[class*='language-'] { - text-shadow: none; - } -} - -pre[class*='language-'] { - padding: 1em; - margin: 0.5em 0; - overflow: auto; - background: #1e1e1e; -} - -:not(pre) > code[class*='language-'] { - padding: 0.1em 0.3em; - border-radius: 0.3em; - color: #db4c69; - background: #1e1e1e; -} -/********************************************************* -* Tokens -*/ -.namespace { - opacity: 0.7; -} - -.token.doctype .token.doctype-tag { - color: #569cd6; -} - -.token.doctype .token.name { - color: #9cdcfe; -} - -.token.comment, -.token.prolog { - color: #6a9955; -} - -.token.punctuation, -.language-html .language-css .token.punctuation, -.language-html .language-javascript .token.punctuation { - color: #d4d4d4; -} - -.token.property, -.token.tag, -.token.boolean, -.token.number, -.token.constant, -.token.symbol, -.token.inserted, -.token.unit { - color: #b5cea8; -} - -.token.selector, -.token.attr-name, -.token.string, -.token.char, -.token.builtin, -.token.deleted { - color: #ce9178; -} - -.language-css .token.string.url { - text-decoration: underline; -} - -.token.operator, -.token.entity { - color: #d4d4d4; -} - -.token.operator.arrow { - color: #569cd6; -} - -.token.atrule { - color: #ce9178; -} - -.token.atrule .token.rule { - color: #c586c0; -} - -.token.atrule .token.url { - color: #9cdcfe; -} - -.token.atrule .token.url .token.function { - color: #dcdcaa; -} - -.token.atrule .token.url .token.punctuation { - color: #d4d4d4; -} - -.token.keyword { - color: #569cd6; -} - -.token.keyword.module, -.token.keyword.control-flow { - color: #c586c0; -} - -.token.function, -.token.function .token.maybe-class-name { - color: #dcdcaa; -} - -.token.regex { - color: #d16969; -} - -.token.important { - color: #569cd6; -} - -.token.italic { - font-style: italic; -} - -.token.constant { - color: #9cdcfe; -} - -.token.class-name, -.token.maybe-class-name { - color: #4ec9b0; -} - -.token.console { - color: #9cdcfe; -} - -.token.parameter { - color: #9cdcfe; -} - -.token.interpolation { - color: #9cdcfe; -} - 
-.token.punctuation.interpolation-punctuation { - color: #569cd6; -} - -.token.boolean { - color: #569cd6; -} - -.token.property, -.token.variable, -.token.imports .token.maybe-class-name, -.token.exports .token.maybe-class-name { - color: #9cdcfe; -} - -.token.selector { - color: #d7ba7d; -} - -.token.escape { - color: #d7ba7d; -} - -.token.tag { - color: #569cd6; -} - -.token.tag .token.punctuation { - color: #808080; -} - -.token.cdata { - color: #808080; -} - -.token.attr-name { - color: #9cdcfe; -} - -.token.attr-value, -.token.attr-value .token.punctuation { - color: #ce9178; -} - -.token.attr-value .token.punctuation.attr-equals { - color: #d4d4d4; -} - -.token.entity { - color: #569cd6; -} - -.token.namespace { - color: #4ec9b0; -} -/********************************************************* -* Language Specific -*/ - -pre[class*='language-javascript'], -code[class*='language-javascript'], -pre[class*='language-jsx'], -code[class*='language-jsx'], -pre[class*='language-typescript'], -code[class*='language-typescript'], -pre[class*='language-tsx'], -code[class*='language-tsx'] { - color: #9cdcfe; -} - -pre[class*='language-css'], -code[class*='language-css'] { - color: #ce9178; -} - -pre[class*='language-html'], -code[class*='language-html'] { - color: #d4d4d4; -} - -.language-regex .token.anchor { - color: #dcdcaa; -} - -.language-html .token.punctuation { - color: #808080; -} -/********************************************************* -* Line highlighting -*/ -pre[class*='language-'] > code[class*='language-'] { - position: relative; - z-index: 1; -} - -.line-highlight.line-highlight { - background: #f7ebc6; - box-shadow: inset 5px 0 0 #f7d87c; - z-index: 0; -} diff --git a/packages/ui/src/ui-component/editor/prism-light.css b/packages/ui/src/ui-component/editor/prism-light.css deleted file mode 100644 index 95d6d6eba..000000000 --- a/packages/ui/src/ui-component/editor/prism-light.css +++ /dev/null @@ -1,207 +0,0 @@ -code[class*='language-'], -pre[class*='language-'] { - text-align: left; - white-space: pre; - word-spacing: normal; - word-break: normal; - word-wrap: normal; - color: #90a4ae; - background: #fafafa; - font-family: Roboto Mono, monospace; - font-size: 1em; - line-height: 1.5em; - - -moz-tab-size: 4; - -o-tab-size: 4; - tab-size: 4; - - -webkit-hyphens: none; - -moz-hyphens: none; - -ms-hyphens: none; - hyphens: none; -} - -code[class*='language-']::-moz-selection, -pre[class*='language-']::-moz-selection, -code[class*='language-'] ::-moz-selection, -pre[class*='language-'] ::-moz-selection { - background: #cceae7; - color: #263238; -} - -code[class*='language-']::selection, -pre[class*='language-']::selection, -code[class*='language-'] ::selection, -pre[class*='language-'] ::selection { - background: #cceae7; - color: #263238; -} - -:not(pre) > code[class*='language-'] { - white-space: normal; - border-radius: 0.2em; - padding: 0.1em; -} - -pre[class*='language-'] { - overflow: auto; - position: relative; - margin: 0.5em 0; - padding: 1.25em 1em; -} - -.language-css > code, -.language-sass > code, -.language-scss > code { - color: #f76d47; -} - -[class*='language-'] .namespace { - opacity: 0.7; -} - -.token.atrule { - color: #7c4dff; -} - -.token.attr-name { - color: #39adb5; -} - -.token.attr-value { - color: #f6a434; -} - -.token.attribute { - color: #f6a434; -} - -.token.boolean { - color: #7c4dff; -} - -.token.builtin { - color: #39adb5; -} - -.token.cdata { - color: #39adb5; -} - -.token.char { - color: #39adb5; -} - -.token.class { - color: #39adb5; -} - 
-.token.class-name { - color: #6182b8; -} - -.token.comment { - color: #aabfc9; -} - -.token.constant { - color: #7c4dff; -} - -.token.deleted { - color: #e53935; -} - -.token.doctype { - color: #aabfc9; -} - -.token.entity { - color: #e53935; -} - -.token.function { - color: #7c4dff; -} - -.token.hexcode { - color: #f76d47; -} - -.token.id { - color: #7c4dff; - font-weight: bold; -} - -.token.important { - color: #7c4dff; - font-weight: bold; -} - -.token.inserted { - color: #39adb5; -} - -.token.keyword { - color: #7c4dff; -} - -.token.number { - color: #f76d47; -} - -.token.operator { - color: #39adb5; -} - -.token.prolog { - color: #aabfc9; -} - -.token.property { - color: #39adb5; -} - -.token.pseudo-class { - color: #f6a434; -} - -.token.pseudo-element { - color: #f6a434; -} - -.token.punctuation { - color: #39adb5; -} - -.token.regex { - color: #6182b8; -} - -.token.selector { - color: #e53935; -} - -.token.string { - color: #f6a434; -} - -.token.symbol { - color: #7c4dff; -} - -.token.tag { - color: #e53935; -} - -.token.unit { - color: #f76d47; -} - -.token.url { - color: #e53935; -} - -.token.variable { - color: #e53935; -} diff --git a/packages/ui/src/ui-component/input/Input.js b/packages/ui/src/ui-component/input/Input.js index 6993847b0..3e5759386 100644 --- a/packages/ui/src/ui-component/input/Input.js +++ b/packages/ui/src/ui-component/input/Input.js @@ -1,23 +1,10 @@ import { useState, useEffect, useRef } from 'react' import PropTypes from 'prop-types' import { FormControl, OutlinedInput, Popover } from '@mui/material' -import ExpandTextDialog from 'ui-component/dialog/ExpandTextDialog' import SelectVariable from 'ui-component/json/SelectVariable' import { getAvailableNodesForVariable } from 'utils/genericHelper' -export const Input = ({ - inputParam, - value, - nodes, - edges, - nodeId, - onChange, - disabled = false, - showDialog, - dialogProps, - onDialogCancel, - onDialogConfirm -}) => { +export const Input = ({ inputParam, value, nodes, edges, nodeId, onChange, disabled = false }) => { const [myValue, setMyValue] = useState(value ?? '') const [anchorEl, setAnchorEl] = useState(null) const [availableNodesForVariable, setAvailableNodesForVariable] = useState([]) @@ -86,17 +73,6 @@ export const Input = ({ }} /> - {showDialog && ( - { - setMyValue(newValue) - onDialogConfirm(newValue, inputParamName) - }} - > - )}
{inputParam?.acceptVariable && ( diff --git a/packages/ui/src/views/canvas/NodeInputHandler.js b/packages/ui/src/views/canvas/NodeInputHandler.js index 892a6273d..92a43cf80 100644 --- a/packages/ui/src/views/canvas/NodeInputHandler.js +++ b/packages/ui/src/views/canvas/NodeInputHandler.js @@ -22,8 +22,11 @@ import { flowContext } from 'store/context/ReactFlowContext' import { isValidConnection } from 'utils/genericHelper' import { JsonEditorInput } from 'ui-component/json/JsonEditor' import { TooltipWithParser } from 'ui-component/tooltip/TooltipWithParser' +import { CodeEditor } from 'ui-component/editor/CodeEditor' + import ToolDialog from 'views/tools/ToolDialog' import AssistantDialog from 'views/assistants/AssistantDialog' +import ExpandTextDialog from 'ui-component/dialog/ExpandTextDialog' import FormatPromptValuesDialog from 'ui-component/dialog/FormatPromptValuesDialog' import CredentialInputHandler from './CredentialInputHandler' @@ -83,7 +86,7 @@ const NodeInputHandler = ({ inputAnchor, inputParam, data, disabled = false, isA } } } - const onFormatPromptValuesClicked = (value, inputParam) => { + const onEditJSONClicked = (value, inputParam) => { // Preset values if the field is format prompt values let inputValue = value if (inputParam.name === 'promptValues' && !value) { @@ -255,7 +258,7 @@ const NodeInputHandler = ({ inputAnchor, inputParam, data, disabled = false, isA {inputParam.description && }
- {inputParam.type === 'string' && inputParam.rows && ( + {((inputParam.type === 'string' && inputParam.rows) || inputParam.type === 'code') && ( (data.inputs[inputParam.name] = newValue)} /> )} + {inputParam.type === 'code' && ( + <> +
+
+ (data.inputs[inputParam.name] = code)} + basicSetup={{ highlightActiveLine: false, highlightActiveLineGutter: false }} + /> +
+ + )} {(inputParam.type === 'string' || inputParam.type === 'password' || inputParam.type === 'number') && ( setShowExpandDialog(false)} - onDialogConfirm={(newValue, inputParamName) => onExpandDialogSave(newValue, inputParamName)} /> )} {inputParam.type === 'json' && ( @@ -353,11 +369,12 @@ const NodeInputHandler = ({ inputAnchor, inputParam, data, disabled = false, isA {inputParam?.acceptVariable && ( <> setAsyncOptionEditDialog('')} onConfirm={onConfirmAsyncOption} > + setShowExpandDialog(false)} + onConfirm={(newValue, inputParamName) => onExpandDialogSave(newValue, inputParamName)} + > ) } diff --git a/packages/ui/src/views/tools/ToolDialog.js b/packages/ui/src/views/tools/ToolDialog.js index 398e9eb8d..6272e05fa 100644 --- a/packages/ui/src/views/tools/ToolDialog.js +++ b/packages/ui/src/views/tools/ToolDialog.js @@ -12,9 +12,7 @@ import { TooltipWithParser } from 'ui-component/tooltip/TooltipWithParser' import { GridActionsCellItem } from '@mui/x-data-grid' import DeleteIcon from '@mui/icons-material/Delete' import ConfirmDialog from 'ui-component/dialog/ConfirmDialog' -import { DarkCodeEditor } from 'ui-component/editor/DarkCodeEditor' -import { LightCodeEditor } from 'ui-component/editor/LightCodeEditor' -import { useTheme } from '@mui/material/styles' +import { CodeEditor } from 'ui-component/editor/CodeEditor' // Icons import { IconX, IconFileExport } from '@tabler/icons' @@ -56,7 +54,6 @@ try { const ToolDialog = ({ show, dialogProps, onUseTemplate, onCancel, onConfirm }) => { const portalElement = document.getElementById('portal') - const theme = useTheme() const customization = useSelector((state) => state.customization) const dispatch = useDispatch() @@ -490,32 +487,14 @@ const ToolDialog = ({ show, dialogProps, onUseTemplate, onCancel, onConfirm }) = See Example )} - {customization.isDarkMode ? ( - setToolFunc(code)} - style={{ - fontSize: '0.875rem', - minHeight: 'calc(100vh - 220px)', - width: '100%', - borderRadius: 5 - }} - /> - ) : ( - setToolFunc(code)} - style={{ - fontSize: '0.875rem', - minHeight: 'calc(100vh - 220px)', - width: '100%', - border: `1px solid ${theme.palette.grey[300]}`, - borderRadius: 5 - }} - /> - )} + setToolFunc(code)} + />
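With this change ExpandTextDialog, NodeInputHandler, and ToolDialog all render the same CodeMirror-based CodeEditor introduced in packages/ui/src/ui-component/editor/CodeEditor.js, replacing the removed prism-based DarkCodeEditor/LightCodeEditor pair. Below is a minimal usage sketch based on the component's prop types; the theme keys, height value, and wrapper component are illustrative assumptions rather than values taken from this diff.

```jsx
import { useState } from 'react'
import { CodeEditor } from 'ui-component/editor/CodeEditor'

// Hypothetical wrapper: lets a user edit a JS snippet and reports changes upward.
const FunctionEditor = ({ isDarkMode, onCodeChange }) => {
    const [code, setCode] = useState('return 1 + 1')

    return (
        <CodeEditor
            value={code}
            lang={'js'} // 'js' or 'json', matching the language modes CodeEditor imports
            theme={isDarkMode ? 'dark' : 'light'} // assumed keys; the component imports the vscodeDark and sublime themes
            height={'calc(100vh - 220px)'} // illustrative; call sites in this diff size the editor themselves
            placeholder={'Write your function here'}
            basicSetup={{ highlightActiveLine: false, highlightActiveLineGutter: false }} // forwarded to CodeMirror, as in NodeInputHandler
            disabled={false}
            onValueChange={(newCode) => {
                setCode(newCode)
                if (onCodeChange) onCodeChange(newCode)
            }}
        />
    )
}

export default FunctionEditor
```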