Add marketplaces

This commit is contained in:
Henry 2023-04-10 17:47:25 +01:00
parent 58e06718d1
commit c576ea5b67
24 changed files with 2665 additions and 15 deletions

View File

@ -2,6 +2,7 @@ import express, { Request, Response } from 'express'
import path from 'path'
import cors from 'cors'
import http from 'http'
import * as fs from 'fs'
import { IChatFlow, IncomingInput, IReactFlowNode, IReactFlowObject } from './Interface'
import { getNodeModulesPackagePath, getStartingNode, buildLangchain, getEndingNode, constructGraphs } from './utils'
@ -236,6 +237,30 @@ export class App {
}
})
// ----------------------------------------
// Marketplaces
// ----------------------------------------
// Get all chatflows for marketplaces
// Get all chatflow templates available in the marketplaces directory.
// Each .json file under src/marketplaces becomes one template entry:
//   id          - index of the file in the directory listing
//   name        - file name without the .json extension
//   flowData    - raw JSON text of the flow (served as a string)
//   description - optional "description" field from the flow JSON
this.app.get('/api/v1/marketplaces', async (req: Request, res: Response) => {
    const marketplaceDir = path.join(__dirname, '..', 'src', 'marketplaces')
    const jsonsInDir = fs.readdirSync(marketplaceDir).filter((file) => path.extname(file) === '.json')
    const templates: { id: number; name: string; flowData: string; description: string }[] = []
    jsonsInDir.forEach((file, index) => {
        try {
            // Reuse marketplaceDir instead of rebuilding the path by hand
            const filePath = path.join(marketplaceDir, file)
            const fileData = fs.readFileSync(filePath, 'utf8')
            const fileDataObj = JSON.parse(fileData)
            templates.push({
                id: index,
                name: path.basename(file, '.json'),
                flowData: fileData,
                description: fileDataObj?.description ?? ''
            })
        } catch (e: unknown) {
            // A single malformed JSON file should not break the whole endpoint;
            // skip it and log so it can be fixed.
            console.error(`[marketplaces] skipping invalid template file ${file}:`, e)
        }
    })
    return res.json(templates)
})
// ----------------------------------------
// Serve UI static
// ----------------------------------------

View File

@ -0,0 +1,313 @@
{
"description": "Output antonym of given user input using few-shot prompt template built with examples",
"nodes": [
{
"width": 300,
"height": 885,
"id": "fewShotPromptTemplate_0",
"position": {
"x": 495.78246013667433,
"y": 168.3684510250569
},
"type": "customNode",
"data": {
"id": "fewShotPromptTemplate_0",
"label": "Few Shot Prompt Template",
"name": "fewShotPromptTemplate",
"type": "FewShotPromptTemplate",
"baseClasses": ["BaseStringPromptTemplate", "BasePromptTemplate"],
"category": "Prompts",
"description": "Prompt template you can build with examples",
"inputParams": [
{
"label": "Examples",
"name": "examples",
"type": "string",
"rows": 5,
"placeholder": "[\n { word: \"happy\", antonym: \"sad\" },\n { word: \"tall\", antonym: \"short\" },\n]"
},
{
"label": "Prefix",
"name": "prefix",
"type": "string",
"rows": 3,
"placeholder": "Give the antonym of every input"
},
{
"label": "Suffix",
"name": "suffix",
"type": "string",
"rows": 3,
"placeholder": "Word: {input}\nAntonym:"
},
{
"label": "Example Separator",
"name": "exampleSeparator",
"type": "string",
"placeholder": "\n\n"
},
{
"label": "Template Format",
"name": "templateFormat",
"type": "options",
"options": [
{
"label": "f-string",
"name": "f-string"
},
{
"label": "jinja-2",
"name": "jinja-2"
}
],
"default": "f-string"
}
],
"inputAnchors": [
{
"label": "Example Prompt",
"name": "examplePrompt",
"type": "BasePromptTemplate",
"id": "fewShotPromptTemplate_0-input-examplePrompt-BasePromptTemplate"
}
],
"inputs": {
"examples": "[\n { \"word\": \"happy\", \"antonym\": \"sad\" },\n { \"word\": \"tall\", \"antonym\": \"short\" }\n]",
"examplePrompt": "{{promptTemplate_0.data.instance}}",
"prefix": "Give the antonym of every input",
"suffix": "Word: {input}\\nAntonym:",
"exampleSeparator": "\\n\\n",
"templateFormat": "f-string"
},
"outputAnchors": [
{
"id": "fewShotPromptTemplate_0-output-fewShotPromptTemplate-BaseStringPromptTemplate|BasePromptTemplate",
"name": "fewShotPromptTemplate",
"label": "FewShotPromptTemplate",
"type": "BaseStringPromptTemplate | BasePromptTemplate"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 495.78246013667433,
"y": 168.3684510250569
},
"dragging": false
},
{
"width": 300,
"height": 359,
"id": "promptTemplate_0",
"position": {
"x": 13.229214123006699,
"y": 171.79555808656028
},
"type": "customNode",
"data": {
"id": "promptTemplate_0",
"label": "Prompt Template",
"name": "promptTemplate",
"type": "PromptTemplate",
"baseClasses": ["BaseStringPromptTemplate", "BasePromptTemplate"],
"category": "Prompts",
"description": "Schema to represent a basic prompt for an LLM",
"inputParams": [
{
"label": "Template",
"name": "template",
"type": "string",
"rows": 5,
"placeholder": "What is a good name for a company that makes {product}?"
}
],
"inputAnchors": [],
"inputs": {
"template": "Word: {word}\\nAntonym: {antonym}\\n"
},
"outputAnchors": [
{
"id": "promptTemplate_0-output-promptTemplate-BaseStringPromptTemplate|BasePromptTemplate",
"name": "promptTemplate",
"label": "PromptTemplate",
"type": "BaseStringPromptTemplate | BasePromptTemplate"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 13.229214123006699,
"y": 171.79555808656028
},
"dragging": false
},
{
"width": 300,
"height": 279,
"id": "llmChain_0",
"position": {
"x": 1237.4411644942688,
"y": 508.82448993622904
},
"type": "customNode",
"data": {
"id": "llmChain_0",
"label": "LLM Chain",
"name": "llmChain",
"type": "LLMChain",
"baseClasses": ["BaseChain"],
"category": "Chains",
"description": "Chain to run queries against LLMs",
"inputParams": [],
"inputAnchors": [
{
"label": "LLM",
"name": "llm",
"type": "BaseLanguageModel",
"id": "llmChain_0-input-llm-BaseLanguageModel"
},
{
"label": "Prompt",
"name": "prompt",
"type": "BasePromptTemplate",
"id": "llmChain_0-input-prompt-BasePromptTemplate"
}
],
"inputs": {
"llm": "{{openAI_0.data.instance}}",
"prompt": "{{fewShotPromptTemplate_0.data.instance}}"
},
"outputAnchors": [
{
"id": "llmChain_0-output-llmChain-BaseChain",
"name": "llmChain",
"label": "LLMChain",
"type": "BaseChain"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 1237.4411644942688,
"y": 508.82448993622904
},
"dragging": false
},
{
"width": 300,
"height": 471,
"id": "openAI_0",
"position": {
"x": 859.220671981777,
"y": 166.25170842824588
},
"type": "customNode",
"data": {
"id": "openAI_0",
"label": "OpenAI",
"name": "openAI",
"type": "OpenAI",
"baseClasses": ["BaseLLM", "BaseLanguageModel"],
"category": "LLMs",
"description": "Wrapper around OpenAI large language models",
"inputParams": [
{
"label": "OpenAI Api Key",
"name": "openAIApiKey",
"type": "password"
},
{
"label": "Model Name",
"name": "modelName",
"type": "options",
"options": [
{
"label": "text-davinci-003",
"name": "text-davinci-003"
},
{
"label": "text-davinci-002",
"name": "text-davinci-002"
},
{
"label": "text-curie-001",
"name": "text-curie-001"
},
{
"label": "text-babbage-001",
"name": "text-babbage-001"
}
],
"default": "text-davinci-003",
"optional": true
},
{
"label": "Temperature",
"name": "temperature",
"type": "number",
"default": 0.7,
"optional": true
}
],
"inputAnchors": [],
"inputs": {
"modelName": "text-davinci-003",
"temperature": 0.7
},
"outputAnchors": [
{
"id": "openAI_0-output-openAI-BaseLLM|BaseLanguageModel",
"name": "openAI",
"label": "OpenAI",
"type": "BaseLLM | BaseLanguageModel"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 859.220671981777,
"y": 166.25170842824588
},
"dragging": false
}
],
"edges": [
{
"source": "promptTemplate_0",
"sourceHandle": "promptTemplate_0-output-promptTemplate-BaseStringPromptTemplate|BasePromptTemplate",
"target": "fewShotPromptTemplate_0",
"targetHandle": "fewShotPromptTemplate_0-input-examplePrompt-BasePromptTemplate",
"type": "buttonedge",
"id": "promptTemplate_0-promptTemplate_0-output-promptTemplate-BaseStringPromptTemplate|BasePromptTemplate-fewShotPromptTemplate_0-fewShotPromptTemplate_0-input-examplePrompt-BasePromptTemplate",
"data": {
"label": ""
}
},
{
"source": "fewShotPromptTemplate_0",
"sourceHandle": "fewShotPromptTemplate_0-output-fewShotPromptTemplate-BaseStringPromptTemplate|BasePromptTemplate",
"target": "llmChain_0",
"targetHandle": "llmChain_0-input-prompt-BasePromptTemplate",
"type": "buttonedge",
"id": "fewShotPromptTemplate_0-fewShotPromptTemplate_0-output-fewShotPromptTemplate-BaseStringPromptTemplate|BasePromptTemplate-llmChain_0-llmChain_0-input-prompt-BasePromptTemplate",
"data": {
"label": ""
}
},
{
"source": "openAI_0",
"sourceHandle": "openAI_0-output-openAI-BaseLLM|BaseLanguageModel",
"target": "llmChain_0",
"targetHandle": "llmChain_0-input-llm-BaseLanguageModel",
"type": "buttonedge",
"id": "openAI_0-openAI_0-output-openAI-BaseLLM|BaseLanguageModel-llmChain_0-llmChain_0-input-llm-BaseLanguageModel",
"data": {
"label": ""
}
}
]
}

View File

@ -0,0 +1,315 @@
{
"description": "A conversational agent for a chat model which utilizes chat specific prompts",
"nodes": [
{
"width": 300,
"height": 277,
"id": "serpAPI_0",
"position": {
"x": 738.3791942291381,
"y": 61.26790912730354
},
"type": "customNode",
"data": {
"id": "serpAPI_0",
"label": "Serp API",
"name": "serpAPI",
"type": "SerpAPI",
"baseClasses": ["Tool"],
"category": "Tools",
"description": "Wrapper around SerpAPI - a real-time API to access Google search results",
"inputParams": [
{
"label": "Serp Api Key",
"name": "apiKey",
"type": "password"
}
],
"inputAnchors": [],
"inputs": {},
"outputAnchors": [
{
"id": "serpAPI_0-output-serpAPI-Tool",
"name": "serpAPI",
"label": "SerpAPI",
"type": "Tool"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 738.3791942291381,
"y": 61.26790912730354
},
"dragging": false
},
{
"width": 300,
"height": 142,
"id": "calculator_0",
"position": {
"x": 1088.946090950564,
"y": 63.99579982092973
},
"type": "customNode",
"data": {
"id": "calculator_0",
"label": "Calculator",
"name": "calculator",
"type": "Calculator",
"baseClasses": ["Tool"],
"category": "Tools",
"description": "Perform calculations on response",
"inputParams": [],
"inputAnchors": [],
"inputs": {},
"outputAnchors": [
{
"id": "calculator_0-output-calculator-Tool",
"name": "calculator",
"label": "Calculator",
"type": "Tool"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 1088.946090950564,
"y": 63.99579982092973
},
"dragging": false
},
{
"width": 300,
"height": 471,
"id": "chatOpenAI_0",
"position": {
"x": 741.0274881835038,
"y": 365.0891876953251
},
"type": "customNode",
"data": {
"id": "chatOpenAI_0",
"label": "ChatOpenAI",
"name": "chatOpenAI",
"type": "ChatOpenAI",
"baseClasses": ["BaseChatModel", "BaseLanguageModel"],
"category": "Chat Models",
"description": "Wrapper around OpenAI large language models that use the Chat endpoint",
"inputParams": [
{
"label": "OpenAI Api Key",
"name": "openAIApiKey",
"type": "password"
},
{
"label": "Model Name",
"name": "modelName",
"type": "options",
"options": [
{
"label": "gpt-3.5-turbo",
"name": "gpt-3.5-turbo"
},
{
"label": "gpt-3.5-turbo-0301",
"name": "gpt-3.5-turbo-0301"
}
],
"default": "gpt-3.5-turbo",
"optional": true
},
{
"label": "Temperature",
"name": "temperature",
"type": "number",
"default": 0.9,
"optional": true
}
],
"inputAnchors": [],
"inputs": {
"modelName": "gpt-3.5-turbo",
"temperature": 0.9
},
"outputAnchors": [
{
"id": "chatOpenAI_0-output-chatOpenAI-BaseChatModel|BaseLanguageModel",
"name": "chatOpenAI",
"label": "ChatOpenAI",
"type": "BaseChatModel | BaseLanguageModel"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 741.0274881835038,
"y": 365.0891876953251
},
"dragging": false
},
{
"width": 300,
"height": 376,
"id": "bufferMemory_0",
"position": {
"x": 753.3628847860326,
"y": 864.8446075184364
},
"type": "customNode",
"data": {
"id": "bufferMemory_0",
"label": "Buffer Memory",
"name": "bufferMemory",
"type": "BufferMemory",
"baseClasses": ["BaseChatMemory", "BaseMemory"],
"category": "Memory",
"description": "Remembers previous conversational back and forths directly",
"inputParams": [
{
"label": "Memory Key",
"name": "memoryKey",
"type": "string",
"default": "chat_history"
},
{
"label": "Input Key",
"name": "inputKey",
"type": "string",
"default": "input"
}
],
"inputAnchors": [],
"inputs": {
"memoryKey": "chat_history",
"inputKey": "input"
},
"outputAnchors": [
{
"id": "bufferMemory_0-output-bufferMemory-BaseChatMemory|BaseMemory",
"name": "bufferMemory",
"label": "BufferMemory",
"type": "BaseChatMemory | BaseMemory"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 753.3628847860326,
"y": 864.8446075184364
},
"dragging": false
},
{
"width": 300,
"height": 330,
"id": "conversationalAgent_0",
"position": {
"x": 1487.0651648211865,
"y": 497.1658250180486
},
"type": "customNode",
"data": {
"id": "conversationalAgent_0",
"label": "Conversational Agent",
"name": "conversationalAgent",
"type": "AgentExecutor",
"baseClasses": ["AgentExecutor"],
"category": "Agents",
"description": "Conversational agent for a chat model. It will utilize chat specific prompts",
"inputParams": [],
"inputAnchors": [
{
"label": "Allowed Tools",
"name": "tools",
"type": "Tool",
"list": true,
"id": "conversationalAgent_0-input-tools-Tool"
},
{
"label": "Chat Model",
"name": "model",
"type": "BaseChatModel",
"id": "conversationalAgent_0-input-model-BaseChatModel"
},
{
"label": "Memory",
"name": "memory",
"type": "BaseChatMemory",
"id": "conversationalAgent_0-input-memory-BaseChatMemory"
}
],
"inputs": {
"tools": ["{{calculator_0.data.instance}}", "{{serpAPI_0.data.instance}}"],
"model": "{{chatOpenAI_0.data.instance}}",
"memory": "{{bufferMemory_0.data.instance}}"
},
"outputAnchors": [
{
"id": "conversationalAgent_0-output-conversationalAgent-AgentExecutor",
"name": "conversationalAgent",
"label": "AgentExecutor",
"type": "AgentExecutor"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 1487.0651648211865,
"y": 497.1658250180486
},
"dragging": false
}
],
"edges": [
{
"source": "calculator_0",
"sourceHandle": "calculator_0-output-calculator-Tool",
"target": "conversationalAgent_0",
"targetHandle": "conversationalAgent_0-input-tools-Tool",
"type": "buttonedge",
"id": "calculator_0-calculator_0-output-calculator-Tool-conversationalAgent_0-conversationalAgent_0-input-tools-Tool",
"data": {
"label": ""
}
},
{
"source": "serpAPI_0",
"sourceHandle": "serpAPI_0-output-serpAPI-Tool",
"target": "conversationalAgent_0",
"targetHandle": "conversationalAgent_0-input-tools-Tool",
"type": "buttonedge",
"id": "serpAPI_0-serpAPI_0-output-serpAPI-Tool-conversationalAgent_0-conversationalAgent_0-input-tools-Tool",
"data": {
"label": ""
}
},
{
"source": "chatOpenAI_0",
"sourceHandle": "chatOpenAI_0-output-chatOpenAI-BaseChatModel|BaseLanguageModel",
"target": "conversationalAgent_0",
"targetHandle": "conversationalAgent_0-input-model-BaseChatModel",
"type": "buttonedge",
"id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-BaseChatModel|BaseLanguageModel-conversationalAgent_0-conversationalAgent_0-input-model-BaseChatModel",
"data": {
"label": ""
}
},
{
"source": "bufferMemory_0",
"sourceHandle": "bufferMemory_0-output-bufferMemory-BaseChatMemory|BaseMemory",
"target": "conversationalAgent_0",
"targetHandle": "conversationalAgent_0-input-memory-BaseChatMemory",
"type": "buttonedge",
"id": "bufferMemory_0-bufferMemory_0-output-bufferMemory-BaseChatMemory|BaseMemory-conversationalAgent_0-conversationalAgent_0-input-memory-BaseChatMemory",
"data": {
"label": ""
}
}
]
}

View File

@ -0,0 +1,415 @@
{
"description": "Text file QnA using conversational retrieval QA chain",
"nodes": [
{
"width": 300,
"height": 376,
"id": "recursiveCharacterTextSplitter_0",
"position": {
"x": 542.7867965644035,
"y": 239.47308806541884
},
"type": "customNode",
"data": {
"id": "recursiveCharacterTextSplitter_0",
"label": "Recursive Character Text Splitter",
"name": "recursiveCharacterTextSplitter",
"type": "RecursiveCharacterTextSplitter",
"baseClasses": ["TextSplitter"],
"category": "Text Splitters",
"description": "Split documents recursively by different characters - starting with \"\n\n\", then \"\n\", then \" \"",
"inputParams": [
{
"label": "Chunk Size",
"name": "chunkSize",
"type": "number",
"default": 1000,
"optional": true
},
{
"label": "Chunk Overlap",
"name": "chunkOverlap",
"type": "number",
"optional": true
}
],
"inputAnchors": [],
"inputs": {
"chunkSize": 1000,
"chunkOverlap": ""
},
"outputAnchors": [
{
"id": "recursiveCharacterTextSplitter_0-output-recursiveCharacterTextSplitter-TextSplitter",
"name": "recursiveCharacterTextSplitter",
"label": "RecursiveCharacterTextSplitter",
"type": "TextSplitter"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 542.7867965644035,
"y": 239.47308806541884
},
"dragging": false
},
{
"width": 300,
"height": 358,
"id": "textFile_0",
"position": {
"x": 928.3774169979697,
"y": 473.8284271247462
},
"type": "customNode",
"data": {
"id": "textFile_0",
"label": "Text File",
"name": "textFile",
"type": "Text",
"baseClasses": ["Document"],
"category": "Document Loaders",
"description": "Load data from text files",
"inputParams": [
{
"label": "Txt File",
"name": "txtFile",
"type": "file",
"fileType": ".txt"
}
],
"inputAnchors": [
{
"label": "Text Splitter",
"name": "textSplitter",
"type": "TextSplitter",
"optional": true,
"id": "textFile_0-input-textSplitter-TextSplitter"
}
],
"inputs": {
"textSplitter": "{{recursiveCharacterTextSplitter_0.data.instance}}"
},
"outputAnchors": [
{
"id": "textFile_0-output-textFile-Document",
"name": "textFile",
"label": "Text",
"type": "Document"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 928.3774169979697,
"y": 473.8284271247462
},
"dragging": false
},
{
"width": 300,
"height": 277,
"id": "openAIEmbeddings_0",
"position": {
"x": 924.7825209307788,
"y": 870.671152679558
},
"type": "customNode",
"data": {
"id": "openAIEmbeddings_0",
"label": "OpenAI Embeddings",
"name": "openAIEmbeddings",
"type": "OpenAIEmbeddings",
"baseClasses": ["Embeddings"],
"category": "Embeddings",
"description": "OpenAI API to generate embeddings for a given text",
"inputParams": [
{
"label": "OpenAI Api Key",
"name": "openAIApiKey",
"type": "password"
}
],
"inputAnchors": [],
"inputs": {},
"outputAnchors": [
{
"id": "openAIEmbeddings_0-output-openAIEmbeddings-Embeddings",
"name": "openAIEmbeddings",
"label": "OpenAIEmbeddings",
"type": "Embeddings"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 924.7825209307788,
"y": 870.671152679558
},
"dragging": false
},
{
"width": 300,
"height": 471,
"id": "openAI_0",
"position": {
"x": 1296.7206878349027,
"y": 167.80701218012993
},
"type": "customNode",
"data": {
"id": "openAI_0",
"label": "OpenAI",
"name": "openAI",
"type": "OpenAI",
"baseClasses": ["BaseLLM", "BaseLanguageModel"],
"category": "LLMs",
"description": "Wrapper around OpenAI large language models",
"inputParams": [
{
"label": "OpenAI Api Key",
"name": "openAIApiKey",
"type": "password"
},
{
"label": "Model Name",
"name": "modelName",
"type": "options",
"options": [
{
"label": "text-davinci-003",
"name": "text-davinci-003"
},
{
"label": "text-davinci-002",
"name": "text-davinci-002"
},
{
"label": "text-curie-001",
"name": "text-curie-001"
},
{
"label": "text-babbage-001",
"name": "text-babbage-001"
}
],
"default": "text-davinci-003",
"optional": true
},
{
"label": "Temperature",
"name": "temperature",
"type": "number",
"default": 0.7,
"optional": true
}
],
"inputAnchors": [],
"inputs": {
"modelName": "text-davinci-003",
"temperature": 0.7
},
"outputAnchors": [
{
"id": "openAI_0-output-openAI-BaseLLM|BaseLanguageModel",
"name": "openAI",
"label": "OpenAI",
"type": "BaseLLM | BaseLanguageModel"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 1296.7206878349027,
"y": 167.80701218012993
},
"dragging": false
},
{
"width": 300,
"height": 576,
"id": "pineconeUpsert_0",
"position": {
"x": 1293.8922607101565,
"y": 692.4802438205481
},
"type": "customNode",
"data": {
"id": "pineconeUpsert_0",
"label": "Pinecone Upsert Document",
"name": "pineconeUpsert",
"type": "Pinecone",
"baseClasses": ["BaseRetriever"],
"category": "Vector Stores",
"description": "Upsert documents to Pinecone",
"inputParams": [
{
"label": "Pinecone Api Key",
"name": "pineconeApiKey",
"type": "password"
},
{
"label": "Pinecone Environment",
"name": "pineconeEnv",
"type": "string"
},
{
"label": "Pinecone Index",
"name": "pineconeIndex",
"type": "string"
}
],
"inputAnchors": [
{
"label": "Document",
"name": "document",
"type": "Document",
"id": "pineconeUpsert_0-input-document-Document"
},
{
"label": "Embeddings",
"name": "embeddings",
"type": "Embeddings",
"id": "pineconeUpsert_0-input-embeddings-Embeddings"
}
],
"inputs": {
"document": "{{textFile_0.data.instance}}",
"embeddings": "{{openAIEmbeddings_0.data.instance}}",
"pineconeEnv": "us-west4-gcp",
"pineconeIndex": "test"
},
"outputAnchors": [
{
"id": "pineconeUpsert_0-output-pineconeUpsert-BaseRetriever",
"name": "pineconeUpsert",
"label": "Pinecone",
"type": "BaseRetriever"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 1293.8922607101565,
"y": 692.4802438205481
},
"dragging": false
},
{
"width": 300,
"height": 279,
"id": "conversationalRetrievalQAChain_0",
"position": {
"x": 1742.1979599824272,
"y": 607.6274300781624
},
"type": "customNode",
"data": {
"id": "conversationalRetrievalQAChain_0",
"label": "Conversational Retrieval QA Chain",
"name": "conversationalRetrievalQAChain",
"type": "ConversationalRetrievalQAChain",
"baseClasses": ["BaseChain"],
"category": "Chains",
"description": "Document QA - built on RetrievalQAChain to provide a chat history component",
"inputParams": [],
"inputAnchors": [
{
"label": "LLM",
"name": "llm",
"type": "BaseLanguageModel",
"id": "conversationalRetrievalQAChain_0-input-llm-BaseLanguageModel"
},
{
"label": "Vector Store Retriever",
"name": "vectorStoreRetriever",
"type": "BaseRetriever",
"id": "conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever"
}
],
"inputs": {
"llm": "{{openAI_0.data.instance}}",
"vectorStoreRetriever": "{{pineconeUpsert_0.data.instance}}"
},
"outputAnchors": [
{
"id": "conversationalRetrievalQAChain_0-output-conversationalRetrievalQAChain-BaseChain",
"name": "conversationalRetrievalQAChain",
"label": "ConversationalRetrievalQAChain",
"type": "BaseChain"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 1742.1979599824272,
"y": 607.6274300781624
},
"dragging": false
}
],
"edges": [
{
"source": "recursiveCharacterTextSplitter_0",
"sourceHandle": "recursiveCharacterTextSplitter_0-output-recursiveCharacterTextSplitter-TextSplitter",
"target": "textFile_0",
"targetHandle": "textFile_0-input-textSplitter-TextSplitter",
"type": "buttonedge",
"id": "recursiveCharacterTextSplitter_0-recursiveCharacterTextSplitter_0-output-recursiveCharacterTextSplitter-TextSplitter-textFile_0-textFile_0-input-textSplitter-TextSplitter",
"data": {
"label": ""
}
},
{
"source": "openAI_0",
"sourceHandle": "openAI_0-output-openAI-BaseLLM|BaseLanguageModel",
"target": "conversationalRetrievalQAChain_0",
"targetHandle": "conversationalRetrievalQAChain_0-input-llm-BaseLanguageModel",
"type": "buttonedge",
"id": "openAI_0-openAI_0-output-openAI-BaseLLM|BaseLanguageModel-conversationalRetrievalQAChain_0-conversationalRetrievalQAChain_0-input-llm-BaseLanguageModel",
"data": {
"label": ""
}
},
{
"source": "pineconeUpsert_0",
"sourceHandle": "pineconeUpsert_0-output-pineconeUpsert-BaseRetriever",
"target": "conversationalRetrievalQAChain_0",
"targetHandle": "conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever",
"type": "buttonedge",
"id": "pineconeUpsert_0-pineconeUpsert_0-output-pineconeUpsert-BaseRetriever-conversationalRetrievalQAChain_0-conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever",
"data": {
"label": ""
}
},
{
"source": "textFile_0",
"sourceHandle": "textFile_0-output-textFile-Document",
"target": "pineconeUpsert_0",
"targetHandle": "pineconeUpsert_0-input-document-Document",
"type": "buttonedge",
"id": "textFile_0-textFile_0-output-textFile-Document-pineconeUpsert_0-pineconeUpsert_0-input-document-Document",
"data": {
"label": ""
}
},
{
"source": "openAIEmbeddings_0",
"sourceHandle": "openAIEmbeddings_0-output-openAIEmbeddings-Embeddings",
"target": "pineconeUpsert_0",
"targetHandle": "pineconeUpsert_0-input-embeddings-Embeddings",
"type": "buttonedge",
"id": "openAIEmbeddings_0-openAIEmbeddings_0-output-openAIEmbeddings-Embeddings-pineconeUpsert_0-pineconeUpsert_0-input-embeddings-Embeddings",
"data": {
"label": ""
}
}
]
}

View File

@ -0,0 +1,430 @@
{
"description": "Github repo QnA using conversational retrieval QA chain",
"nodes": [
{
"width": 300,
"height": 279,
"id": "conversationalRetrievalQAChain_0",
"position": {
"x": 1557.4588265034258,
"y": 314.1114881397983
},
"type": "customNode",
"data": {
"id": "conversationalRetrievalQAChain_0",
"label": "Conversational Retrieval QA Chain",
"name": "conversationalRetrievalQAChain",
"type": "ConversationalRetrievalQAChain",
"baseClasses": ["BaseChain"],
"category": "Chains",
"description": "Document QA - built on RetrievalQAChain to provide a chat history component",
"inputParams": [],
"inputAnchors": [
{
"label": "LLM",
"name": "llm",
"type": "BaseLanguageModel",
"id": "conversationalRetrievalQAChain_0-input-llm-BaseLanguageModel"
},
{
"label": "Vector Store Retriever",
"name": "vectorStoreRetriever",
"type": "BaseRetriever",
"id": "conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever"
}
],
"inputs": {
"llm": "{{openAI_0.data.instance}}",
"vectorStoreRetriever": "{{pineconeUpsert_0.data.instance}}"
},
"outputAnchors": [
{
"id": "conversationalRetrievalQAChain_0-output-conversationalRetrievalQAChain-BaseChain",
"name": "conversationalRetrievalQAChain",
"label": "ConversationalRetrievalQAChain",
"type": "BaseChain"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 1557.4588265034258,
"y": 314.1114881397983
},
"dragging": false
},
{
"width": 300,
"height": 471,
"id": "openAI_0",
"position": {
"x": 1079.79101466888,
"y": -178.54116849152098
},
"type": "customNode",
"data": {
"id": "openAI_0",
"label": "OpenAI",
"name": "openAI",
"type": "OpenAI",
"baseClasses": ["BaseLLM", "BaseLanguageModel"],
"category": "LLMs",
"description": "Wrapper around OpenAI large language models",
"inputParams": [
{
"label": "OpenAI Api Key",
"name": "openAIApiKey",
"type": "password"
},
{
"label": "Model Name",
"name": "modelName",
"type": "options",
"options": [
{
"label": "text-davinci-003",
"name": "text-davinci-003"
},
{
"label": "text-davinci-002",
"name": "text-davinci-002"
},
{
"label": "text-curie-001",
"name": "text-curie-001"
},
{
"label": "text-babbage-001",
"name": "text-babbage-001"
}
],
"default": "text-davinci-003",
"optional": true
},
{
"label": "Temperature",
"name": "temperature",
"type": "number",
"default": 0.7,
"optional": true
}
],
"inputAnchors": [],
"inputs": {
"modelName": "text-davinci-003",
"temperature": "0"
},
"outputAnchors": [
{
"id": "openAI_0-output-openAI-BaseLLM|BaseLanguageModel",
"name": "openAI",
"label": "OpenAI",
"type": "BaseLLM | BaseLanguageModel"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 1079.79101466888,
"y": -178.54116849152098
},
"dragging": false
},
{
"width": 300,
"height": 376,
"id": "recursiveCharacterTextSplitter_0",
"position": {
"x": 235.6130554027991,
"y": -89.82544163833616
},
"type": "customNode",
"data": {
"id": "recursiveCharacterTextSplitter_0",
"label": "Recursive Character Text Splitter",
"name": "recursiveCharacterTextSplitter",
"type": "RecursiveCharacterTextSplitter",
"baseClasses": ["TextSplitter"],
"category": "Text Splitters",
"description": "Split documents recursively by different characters - starting with \"\n\n\", then \"\n\", then \" \"",
"inputParams": [
{
"label": "Chunk Size",
"name": "chunkSize",
"type": "number",
"default": 1000,
"optional": true
},
{
"label": "Chunk Overlap",
"name": "chunkOverlap",
"type": "number",
"optional": true
}
],
"inputAnchors": [],
"inputs": {
"chunkSize": 1000,
"chunkOverlap": ""
},
"outputAnchors": [
{
"id": "recursiveCharacterTextSplitter_0-output-recursiveCharacterTextSplitter-TextSplitter",
"name": "recursiveCharacterTextSplitter",
"label": "RecursiveCharacterTextSplitter",
"type": "TextSplitter"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 235.6130554027991,
"y": -89.82544163833616
},
"dragging": false
},
{
"width": 300,
"height": 277,
"id": "openAIEmbeddings_0",
"position": {
"x": 654.473220763302,
"y": 508.09797567725514
},
"type": "customNode",
"data": {
"id": "openAIEmbeddings_0",
"label": "OpenAI Embeddings",
"name": "openAIEmbeddings",
"type": "OpenAIEmbeddings",
"baseClasses": ["Embeddings"],
"category": "Embeddings",
"description": "OpenAI API to generate embeddings for a given text",
"inputParams": [
{
"label": "OpenAI Api Key",
"name": "openAIApiKey",
"type": "password"
}
],
"inputAnchors": [],
"inputs": {},
"outputAnchors": [
{
"id": "openAIEmbeddings_0-output-openAIEmbeddings-Embeddings",
"name": "openAIEmbeddings",
"label": "OpenAIEmbeddings",
"type": "Embeddings"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 654.473220763302,
"y": 508.09797567725514
},
"dragging": false
},
{
"width": 300,
"height": 525,
"id": "github_0",
"position": {
"x": 649.303910738229,
"y": -93.90566658624722
},
"type": "customNode",
"data": {
"id": "github_0",
"label": "Github",
"name": "github",
"type": "Github",
"baseClasses": ["Document"],
"category": "Document Loaders",
"description": "Load data from a GitHub repository",
"inputParams": [
{
"label": "Repo Link",
"name": "repoLink",
"type": "string",
"placeholder": "https://github.com/FlowiseAI/Flowise"
},
{
"label": "Branch",
"name": "branch",
"type": "string",
"default": "main"
},
{
"label": "Access Token",
"name": "accessToken",
"type": "password",
"placeholder": "<GITHUB_ACCESS_TOKEN>",
"optional": true
}
],
"inputAnchors": [
{
"label": "Text Splitter",
"name": "textSplitter",
"type": "TextSplitter",
"optional": true,
"id": "github_0-input-textSplitter-TextSplitter"
}
],
"inputs": {
"repoLink": "https://github.com/kyrolabs/awesome-langchain",
"branch": "main",
"textSplitter": "{{recursiveCharacterTextSplitter_0.data.instance}}"
},
"outputAnchors": [
{
"id": "github_0-output-github-Document",
"name": "github",
"label": "Github",
"type": "Document"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 649.303910738229,
"y": -93.90566658624722
},
"dragging": false
},
{
"width": 300,
"height": 576,
"id": "pineconeUpsert_0",
"position": {
"x": 1089.3652950320754,
"y": 354.9656606763275
},
"type": "customNode",
"data": {
"id": "pineconeUpsert_0",
"label": "Pinecone Upsert Document",
"name": "pineconeUpsert",
"type": "Pinecone",
"baseClasses": ["BaseRetriever"],
"category": "Vector Stores",
"description": "Upsert documents to Pinecone",
"inputParams": [
{
"label": "Pinecone Api Key",
"name": "pineconeApiKey",
"type": "password"
},
{
"label": "Pinecone Environment",
"name": "pineconeEnv",
"type": "string"
},
{
"label": "Pinecone Index",
"name": "pineconeIndex",
"type": "string"
}
],
"inputAnchors": [
{
"label": "Document",
"name": "document",
"type": "Document",
"id": "pineconeUpsert_0-input-document-Document"
},
{
"label": "Embeddings",
"name": "embeddings",
"type": "Embeddings",
"id": "pineconeUpsert_0-input-embeddings-Embeddings"
}
],
"inputs": {
"document": "{{github_0.data.instance}}",
"embeddings": "{{openAIEmbeddings_0.data.instance}}",
"pineconeEnv": "us-west4-gcp",
"pineconeIndex": "test"
},
"outputAnchors": [
{
"id": "pineconeUpsert_0-output-pineconeUpsert-BaseRetriever",
"name": "pineconeUpsert",
"label": "Pinecone",
"type": "BaseRetriever"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 1089.3652950320754,
"y": 354.9656606763275
},
"dragging": false
}
],
"edges": [
{
"source": "openAI_0",
"sourceHandle": "openAI_0-output-openAI-BaseLLM|BaseLanguageModel",
"target": "conversationalRetrievalQAChain_0",
"targetHandle": "conversationalRetrievalQAChain_0-input-llm-BaseLanguageModel",
"type": "buttonedge",
"id": "openAI_0-openAI_0-output-openAI-BaseLLM|BaseLanguageModel-conversationalRetrievalQAChain_0-conversationalRetrievalQAChain_0-input-llm-BaseLanguageModel",
"data": {
"label": ""
}
},
{
"source": "recursiveCharacterTextSplitter_0",
"sourceHandle": "recursiveCharacterTextSplitter_0-output-recursiveCharacterTextSplitter-TextSplitter",
"target": "github_0",
"targetHandle": "github_0-input-textSplitter-TextSplitter",
"type": "buttonedge",
"id": "recursiveCharacterTextSplitter_0-recursiveCharacterTextSplitter_0-output-recursiveCharacterTextSplitter-TextSplitter-github_0-github_0-input-textSplitter-TextSplitter",
"data": {
"label": ""
}
},
{
"source": "github_0",
"sourceHandle": "github_0-output-github-Document",
"target": "pineconeUpsert_0",
"targetHandle": "pineconeUpsert_0-input-document-Document",
"type": "buttonedge",
"id": "github_0-github_0-output-github-Document-pineconeUpsert_0-pineconeUpsert_0-input-document-Document",
"data": {
"label": ""
}
},
{
"source": "openAIEmbeddings_0",
"sourceHandle": "openAIEmbeddings_0-output-openAIEmbeddings-Embeddings",
"target": "pineconeUpsert_0",
"targetHandle": "pineconeUpsert_0-input-embeddings-Embeddings",
"type": "buttonedge",
"id": "openAIEmbeddings_0-openAIEmbeddings_0-output-openAIEmbeddings-Embeddings-pineconeUpsert_0-pineconeUpsert_0-input-embeddings-Embeddings",
"data": {
"label": ""
}
},
{
"source": "pineconeUpsert_0",
"sourceHandle": "pineconeUpsert_0-output-pineconeUpsert-BaseRetriever",
"target": "conversationalRetrievalQAChain_0",
"targetHandle": "conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever",
"type": "buttonedge",
"id": "pineconeUpsert_0-pineconeUpsert_0-output-pineconeUpsert-BaseRetriever-conversationalRetrievalQAChain_0-conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever",
"data": {
"label": ""
}
}
]
}

View File

@ -0,0 +1,252 @@
{
"description": "An agent that uses the ReAct Framework to decide what action to take",
"nodes": [
{
"width": 300,
"height": 279,
"id": "mrlkAgentLLM_0",
"position": {
"x": 1520.156054894558,
"y": 466.34196346475386
},
"type": "customNode",
"data": {
"id": "mrlkAgentLLM_0",
"label": "MRKL Agent for LLMs",
"name": "mrlkAgentLLM",
"type": "AgentExecutor",
"baseClasses": ["AgentExecutor"],
"category": "Agents",
"description": "Agent that uses the ReAct Framework to decide what action to take, optimized to be used with LLMs",
"inputParams": [],
"inputAnchors": [
{
"label": "Allowed Tools",
"name": "tools",
"type": "Tool",
"list": true,
"id": "mrlkAgentLLM_0-input-tools-Tool"
},
{
"label": "LLM Model",
"name": "model",
"type": "BaseLanguageModel",
"id": "mrlkAgentLLM_0-input-model-BaseLanguageModel"
}
],
"inputs": {
"tools": ["{{calculator_0.data.instance}}", "{{serpAPI_0.data.instance}}"],
"model": "{{openAI_0.data.instance}}"
},
"outputAnchors": [
{
"id": "mrlkAgentLLM_0-output-mrlkAgentLLM-AgentExecutor",
"name": "mrlkAgentLLM",
"label": "AgentExecutor",
"type": "AgentExecutor"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 1520.156054894558,
"y": 466.34196346475386
},
"dragging": false
},
{
"width": 300,
"height": 142,
"id": "calculator_0",
"position": {
"x": 1141.0497522733922,
"y": 172.32224599434292
},
"type": "customNode",
"data": {
"id": "calculator_0",
"label": "Calculator",
"name": "calculator",
"type": "Calculator",
"baseClasses": ["Tool"],
"category": "Tools",
"description": "Perform calculations on response",
"inputParams": [],
"inputAnchors": [],
"inputs": {},
"outputAnchors": [
{
"id": "calculator_0-output-calculator-Tool",
"name": "calculator",
"label": "Calculator",
"type": "Tool"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 1141.0497522733922,
"y": 172.32224599434292
},
"dragging": false
},
{
"width": 300,
"height": 277,
"id": "serpAPI_0",
"position": {
"x": 797.0350733284566,
"y": 199.36655049779267
},
"type": "customNode",
"data": {
"id": "serpAPI_0",
"label": "Serp API",
"name": "serpAPI",
"type": "SerpAPI",
"baseClasses": ["Tool"],
"category": "Tools",
"description": "Wrapper around SerpAPI - a real-time API to access Google search results",
"inputParams": [
{
"label": "Serp Api Key",
"name": "apiKey",
"type": "password"
}
],
"inputAnchors": [],
"inputs": {},
"outputAnchors": [
{
"id": "serpAPI_0-output-serpAPI-Tool",
"name": "serpAPI",
"label": "SerpAPI",
"type": "Tool"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 797.0350733284566,
"y": 199.36655049779267
},
"dragging": false
},
{
"width": 300,
"height": 471,
"id": "openAI_0",
"position": {
"x": 917.6484006031452,
"y": 522.1507882519595
},
"type": "customNode",
"data": {
"id": "openAI_0",
"label": "OpenAI",
"name": "openAI",
"type": "OpenAI",
"baseClasses": ["BaseLLM", "BaseLanguageModel"],
"category": "LLMs",
"description": "Wrapper around OpenAI large language models",
"inputParams": [
{
"label": "OpenAI Api Key",
"name": "openAIApiKey",
"type": "password"
},
{
"label": "Model Name",
"name": "modelName",
"type": "options",
"options": [
{
"label": "text-davinci-003",
"name": "text-davinci-003"
},
{
"label": "text-davinci-002",
"name": "text-davinci-002"
},
{
"label": "text-curie-001",
"name": "text-curie-001"
},
{
"label": "text-babbage-001",
"name": "text-babbage-001"
}
],
"default": "text-davinci-003",
"optional": true
},
{
"label": "Temperature",
"name": "temperature",
"type": "number",
"default": 0.7,
"optional": true
}
],
"inputAnchors": [],
"inputs": {
"modelName": "text-davinci-003",
"temperature": 0.7
},
"outputAnchors": [
{
"id": "openAI_0-output-openAI-BaseLLM|BaseLanguageModel",
"name": "openAI",
"label": "OpenAI",
"type": "BaseLLM | BaseLanguageModel"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 917.6484006031452,
"y": 522.1507882519595
},
"dragging": false
}
],
"edges": [
{
"source": "openAI_0",
"sourceHandle": "openAI_0-output-openAI-BaseLLM|BaseLanguageModel",
"target": "mrlkAgentLLM_0",
"targetHandle": "mrlkAgentLLM_0-input-model-BaseLanguageModel",
"type": "buttonedge",
"id": "openAI_0-openAI_0-output-openAI-BaseLLM|BaseLanguageModel-mrlkAgentLLM_0-mrlkAgentLLM_0-input-model-BaseLanguageModel",
"data": {
"label": ""
}
},
{
"source": "calculator_0",
"sourceHandle": "calculator_0-output-calculator-Tool",
"target": "mrlkAgentLLM_0",
"targetHandle": "mrlkAgentLLM_0-input-tools-Tool",
"type": "buttonedge",
"id": "calculator_0-calculator_0-output-calculator-Tool-mrlkAgentLLM_0-mrlkAgentLLM_0-input-tools-Tool",
"data": {
"label": ""
}
},
{
"source": "serpAPI_0",
"sourceHandle": "serpAPI_0-output-serpAPI-Tool",
"target": "mrlkAgentLLM_0",
"targetHandle": "mrlkAgentLLM_0-input-tools-Tool",
"type": "buttonedge",
"id": "serpAPI_0-serpAPI_0-output-serpAPI-Tool-mrlkAgentLLM_0-mrlkAgentLLM_0-input-tools-Tool",
"data": {
"label": ""
}
}
]
}

View File

@ -0,0 +1,217 @@
{
"description": "Basic example of LLM Chain with a Prompt Template and LLM Model",
"nodes": [
{
"width": 300,
"height": 460,
"id": "llmChain_0",
"position": {
"x": 1515.563392772433,
"y": 334.61150897841924
},
"type": "customNode",
"data": {
"id": "llmChain_0",
"label": "LLM Chain",
"name": "llmChain",
"type": "LLMChain",
"baseClasses": ["BaseChain"],
"category": "Chains",
"description": "Chain to run queries against LLMs",
"inputParams": [
{
"label": "Format Prompt Values",
"name": "promptValues",
"type": "string",
"rows": 5,
"placeholder": "{\n \"input_language\": \"English\",\n \"output_language\": \"French\"\n}",
"optional": true
}
],
"inputAnchors": [
{
"label": "LLM",
"name": "llm",
"type": "BaseLanguageModel",
"id": "llmChain_0-input-llm-BaseLanguageModel"
},
{
"label": "Prompt",
"name": "prompt",
"type": "BasePromptTemplate",
"id": "llmChain_0-input-prompt-BasePromptTemplate"
}
],
"inputs": {
"llm": "{{openAI_0.data.instance}}",
"prompt": "{{promptTemplate_0.data.instance}}",
"promptValues": ""
},
"outputAnchors": [
{
"id": "llmChain_0-output-llmChain-BaseChain",
"name": "llmChain",
"label": "LLMChain",
"type": "BaseChain"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 1515.563392772433,
"y": 334.61150897841924
},
"dragging": false
},
{
"width": 300,
"height": 471,
"id": "openAI_0",
"position": {
"x": 954.7026430819806,
"y": 6.975032607064918
},
"type": "customNode",
"data": {
"id": "openAI_0",
"label": "OpenAI",
"name": "openAI",
"type": "OpenAI",
"baseClasses": ["BaseLLM", "BaseLanguageModel"],
"category": "LLMs",
"description": "Wrapper around OpenAI large language models",
"inputParams": [
{
"label": "OpenAI Api Key",
"name": "openAIApiKey",
"type": "password"
},
{
"label": "Model Name",
"name": "modelName",
"type": "options",
"options": [
{
"label": "text-davinci-003",
"name": "text-davinci-003"
},
{
"label": "text-davinci-002",
"name": "text-davinci-002"
},
{
"label": "text-curie-001",
"name": "text-curie-001"
},
{
"label": "text-babbage-001",
"name": "text-babbage-001"
}
],
"default": "text-davinci-003",
"optional": true
},
{
"label": "Temperature",
"name": "temperature",
"type": "number",
"default": 0.7,
"optional": true
}
],
"inputAnchors": [],
"inputs": {
"modelName": "text-davinci-003",
"temperature": 0.7
},
"outputAnchors": [
{
"id": "openAI_0-output-openAI-BaseLLM|BaseLanguageModel",
"name": "openAI",
"label": "OpenAI",
"type": "BaseLLM | BaseLanguageModel"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 954.7026430819806,
"y": 6.975032607064918
},
"dragging": false
},
{
"width": 300,
"height": 359,
"id": "promptTemplate_0",
"position": {
"x": 954.1542757936061,
"y": 515.2247261712328
},
"type": "customNode",
"data": {
"id": "promptTemplate_0",
"label": "Prompt Template",
"name": "promptTemplate",
"type": "PromptTemplate",
"baseClasses": ["BaseStringPromptTemplate", "BasePromptTemplate"],
"category": "Prompts",
"description": "Schema to represent a basic prompt for an LLM",
"inputParams": [
{
"label": "Template",
"name": "template",
"type": "string",
"rows": 5,
"placeholder": "What is a good name for a company that makes {product}?"
}
],
"inputAnchors": [],
"inputs": {
"template": "What is a good name for a company that makes {product}?"
},
"outputAnchors": [
{
"id": "promptTemplate_0-output-promptTemplate-BaseStringPromptTemplate|BasePromptTemplate",
"name": "promptTemplate",
"label": "PromptTemplate",
"type": "BaseStringPromptTemplate | BasePromptTemplate"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 954.1542757936061,
"y": 515.2247261712328
},
"dragging": false
}
],
"edges": [
{
"source": "openAI_0",
"sourceHandle": "openAI_0-output-openAI-BaseLLM|BaseLanguageModel",
"target": "llmChain_0",
"targetHandle": "llmChain_0-input-llm-BaseLanguageModel",
"type": "buttonedge",
"id": "openAI_0-openAI_0-output-openAI-BaseLLM|BaseLanguageModel-llmChain_0-llmChain_0-input-llm-BaseLanguageModel",
"data": {
"label": ""
}
},
{
"source": "promptTemplate_0",
"sourceHandle": "promptTemplate_0-output-promptTemplate-BaseStringPromptTemplate|BasePromptTemplate",
"target": "llmChain_0",
"targetHandle": "llmChain_0-input-prompt-BasePromptTemplate",
"type": "buttonedge",
"id": "promptTemplate_0-promptTemplate_0-output-promptTemplate-BaseStringPromptTemplate|BasePromptTemplate-llmChain_0-llmChain_0-input-prompt-BasePromptTemplate",
"data": {
"label": ""
}
}
]
}

View File

@ -0,0 +1,217 @@
{
"description": "Language translation using LLM Chain with a Chat Prompt Template and Chat Model",
"nodes": [
{
"width": 300,
"height": 460,
"id": "llmChain_0",
"position": {
"x": 1301.8762472836022,
"y": 772.7199253009146
},
"type": "customNode",
"data": {
"id": "llmChain_0",
"label": "LLM Chain",
"name": "llmChain",
"type": "LLMChain",
"baseClasses": ["BaseChain"],
"category": "Chains",
"description": "Chain to run queries against LLMs",
"inputParams": [
{
"label": "Format Prompt Values",
"name": "promptValues",
"type": "string",
"rows": 5,
"placeholder": "{\n \"input_language\": \"English\",\n \"output_language\": \"French\"\n}",
"optional": true
}
],
"inputAnchors": [
{
"label": "LLM",
"name": "llm",
"type": "BaseLanguageModel",
"id": "llmChain_0-input-llm-BaseLanguageModel"
},
{
"label": "Prompt",
"name": "prompt",
"type": "BasePromptTemplate",
"id": "llmChain_0-input-prompt-BasePromptTemplate"
}
],
"inputs": {
"llm": "{{chatOpenAI_0.data.instance}}",
"prompt": "{{chatPromptTemplate_0.data.instance}}",
"promptValues": "{\n \"input_language\": \"English\",\n \"output_language\": \"Italian\"\n}"
},
"outputAnchors": [
{
"id": "llmChain_0-output-llmChain-BaseChain",
"name": "llmChain",
"label": "LLMChain",
"type": "BaseChain"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 1301.8762472836022,
"y": 772.7199253009146
},
"dragging": false
},
{
"width": 300,
"height": 471,
"id": "chatOpenAI_0",
"position": {
"x": 821.5000372338304,
"y": 369.8333649665954
},
"type": "customNode",
"data": {
"id": "chatOpenAI_0",
"label": "ChatOpenAI",
"name": "chatOpenAI",
"type": "ChatOpenAI",
"baseClasses": ["BaseChatModel", "BaseLanguageModel"],
"category": "Chat Models",
"description": "Wrapper around OpenAI large language models that use the Chat endpoint",
"inputParams": [
{
"label": "OpenAI Api Key",
"name": "openAIApiKey",
"type": "password"
},
{
"label": "Model Name",
"name": "modelName",
"type": "options",
"options": [
{
"label": "gpt-3.5-turbo",
"name": "gpt-3.5-turbo"
},
{
"label": "gpt-3.5-turbo-0301",
"name": "gpt-3.5-turbo-0301"
}
],
"default": "gpt-3.5-turbo",
"optional": true
},
{
"label": "Temperature",
"name": "temperature",
"type": "number",
"default": 0.9,
"optional": true
}
],
"inputAnchors": [],
"inputs": {
"modelName": "gpt-3.5-turbo",
"temperature": "0"
},
"outputAnchors": [
{
"id": "chatOpenAI_0-output-chatOpenAI-BaseChatModel|BaseLanguageModel",
"name": "chatOpenAI",
"label": "ChatOpenAI",
"type": "BaseChatModel | BaseLanguageModel"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 821.5000372338304,
"y": 369.8333649665954
},
"dragging": false
},
{
"width": 300,
"height": 459,
"id": "chatPromptTemplate_0",
"position": {
"x": 821.3479428749118,
"y": 871.7203878238932
},
"type": "customNode",
"data": {
"id": "chatPromptTemplate_0",
"label": "Chat Prompt Template",
"name": "chatPromptTemplate",
"type": "ChatPromptTemplate",
"baseClasses": ["BasePromptTemplate"],
"category": "Prompts",
"description": "Schema to represent a chat prompt",
"inputParams": [
{
"label": "System Message",
"name": "systemMessagePrompt",
"type": "string",
"rows": 3,
"placeholder": "You are a helpful assistant that translates {input_language} to {output_language}."
},
{
"label": "Human Message",
"name": "humanMessagePrompt",
"type": "string",
"rows": 3,
"placeholder": "{text}"
}
],
"inputAnchors": [],
"inputs": {
"systemMessagePrompt": "You are a helpful assistant that translates {input_language} to {output_language}.",
"humanMessagePrompt": "{input}"
},
"outputAnchors": [
{
"id": "chatPromptTemplate_0-output-chatPromptTemplate-BasePromptTemplate",
"name": "chatPromptTemplate",
"label": "ChatPromptTemplate",
"type": "BasePromptTemplate"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 821.3479428749118,
"y": 871.7203878238932
},
"dragging": false
}
],
"edges": [
{
"source": "chatOpenAI_0",
"sourceHandle": "chatOpenAI_0-output-chatOpenAI-BaseChatModel|BaseLanguageModel",
"target": "llmChain_0",
"targetHandle": "llmChain_0-input-llm-BaseLanguageModel",
"type": "buttonedge",
"id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-BaseChatModel|BaseLanguageModel-llmChain_0-llmChain_0-input-llm-BaseLanguageModel",
"data": {
"label": ""
}
},
{
"source": "chatPromptTemplate_0",
"sourceHandle": "chatPromptTemplate_0-output-chatPromptTemplate-BasePromptTemplate",
"target": "llmChain_0",
"targetHandle": "llmChain_0-input-prompt-BasePromptTemplate",
"type": "buttonedge",
"id": "chatPromptTemplate_0-chatPromptTemplate_0-output-chatPromptTemplate-BasePromptTemplate-llmChain_0-llmChain_0-input-prompt-BasePromptTemplate",
"data": {
"label": ""
}
}
]
}

View File

@ -0,0 +1,7 @@
import client from './client'

// ----- Marketplaces API -----

/**
 * Fetch every marketplace chatflow template from the server.
 * GET /api/v1/marketplaces
 * @returns {Promise} axios response whose data is an array of
 *          { id, name, flowData, description } templates.
 */
function getAllMarketplaces() {
    return client.get('/marketplaces')
}

export default {
    getAllMarketplaces
}

View File

@ -1,8 +1,8 @@
// assets
import { IconHierarchy, IconKey, IconBook, IconListCheck } from '@tabler/icons'
import { IconHierarchy, IconBuildingStore } from '@tabler/icons'
// constant
const icons = { IconHierarchy, IconKey, IconBook, IconListCheck }
const icons = { IconHierarchy, IconBuildingStore }
// ==============================|| DASHBOARD MENU ITEMS ||============================== //
@ -18,6 +18,14 @@ const dashboard = {
url: '/chatflows',
icon: icons.IconHierarchy,
breadcrumbs: true
},
{
id: 'marketplaces',
title: 'Marketplaces',
type: 'item',
url: '/marketplaces',
icon: icons.IconBuildingStore,
breadcrumbs: true
}
]
}

View File

@ -6,6 +6,7 @@ import MinimalLayout from 'layout/MinimalLayout'
// canvas routing
const Canvas = Loadable(lazy(() => import('views/canvas')))
const MarketplaceCanvas = Loadable(lazy(() => import('views/marketplaces/MarketplaceCanvas')))
// ==============================|| CANVAS ROUTING ||============================== //
@ -20,6 +21,10 @@ const CanvasRoutes = {
{
path: '/canvas/:id',
element: <Canvas />
},
{
path: '/marketplace/:id',
element: <MarketplaceCanvas />
}
]
}

View File

@ -7,6 +7,9 @@ import Loadable from 'ui-component/loading/Loadable'
// chatflows routing
const Chatflows = Loadable(lazy(() => import('views/chatflows')))
// marketplaces routing
const Marketplaces = Loadable(lazy(() => import('views/marketplaces')))
// ==============================|| MAIN ROUTING ||============================== //
const MainRoutes = {
@ -20,6 +23,10 @@ const MainRoutes = {
{
path: '/chatflows',
element: <Chatflows />
},
{
path: '/marketplaces',
element: <Marketplaces />
}
]
}

View File

@ -45,9 +45,10 @@ const ItemCard = ({ isLoading, data, images, onClick }) => {
<CardWrapper border={false} content={false} onClick={onClick}>
<Box sx={{ p: 2.25 }}>
<Grid container direction='column'>
<div style={{ display: 'flex', flexDirection: 'row', alignItems: 'center' }}>
<div>
<Typography sx={{ fontSize: '1.5rem', fontWeight: 500 }}>{data.name}</Typography>
</div>
{data.description && <span style={{ marginTop: 10 }}>{data.description}</span>}
<Grid sx={{ mt: 1, mb: 1 }} container direction='row'>
{data.deployed && (
<Grid item>

View File

@ -18,7 +18,7 @@ const StyledPopper = styled(Popper)({
}
})
export const Dropdown = ({ name, value, options, onSelect }) => {
export const Dropdown = ({ name, value, options, onSelect, disabled = false }) => {
const customization = useSelector((state) => state.customization)
const findMatchingOptions = (options = [], value) => options.find((option) => option.name === value)
const getDefaultOptionValue = () => ''
@ -28,6 +28,7 @@ export const Dropdown = ({ name, value, options, onSelect }) => {
<FormControl sx={{ mt: 1, width: '100%' }} size='small'>
<Autocomplete
id={name}
disabled={disabled}
size='small'
options={options || []}
value={findMatchingOptions(options, internalValue) || getDefaultOptionValue()}
@ -57,5 +58,6 @@ Dropdown.propTypes = {
name: PropTypes.string,
value: PropTypes.string,
options: PropTypes.array,
onSelect: PropTypes.func
onSelect: PropTypes.func,
disabled: PropTypes.bool
}

View File

@ -5,7 +5,7 @@ import { FormControl, Button } from '@mui/material'
import { IconUpload } from '@tabler/icons'
import { getFileName } from 'utils/genericHelper'
export const File = ({ value, fileType, onChange }) => {
export const File = ({ value, fileType, onChange, disabled = false }) => {
const theme = useTheme()
const [myValue, setMyValue] = useState(value ?? '')
@ -42,7 +42,14 @@ export const File = ({ value, fileType, onChange }) => {
>
{myValue ? getFileName(myValue) : 'Choose a file to upload'}
</span>
<Button variant='outlined' component='label' fullWidth startIcon={<IconUpload />} sx={{ marginRight: '1rem' }}>
<Button
disabled={disabled}
variant='outlined'
component='label'
fullWidth
startIcon={<IconUpload />}
sx={{ marginRight: '1rem' }}
>
{'Upload File'}
<input type='file' accept={fileType} hidden onChange={(e) => handleFileUpload(e)} />
</Button>
@ -53,5 +60,6 @@ export const File = ({ value, fileType, onChange }) => {
File.propTypes = {
value: PropTypes.string,
fileType: PropTypes.string,
onChange: PropTypes.func
onChange: PropTypes.func,
disabled: PropTypes.bool
}

View File

@ -2,13 +2,14 @@ import { useState } from 'react'
import PropTypes from 'prop-types'
import { FormControl, OutlinedInput } from '@mui/material'
export const Input = ({ inputParam, value, onChange }) => {
export const Input = ({ inputParam, value, onChange, disabled = false }) => {
const [myValue, setMyValue] = useState(value ?? '')
return (
<FormControl sx={{ mt: 1, width: '100%' }} size='small'>
<OutlinedInput
id={inputParam.name}
size='small'
disabled={disabled}
type={inputParam.type === 'string' ? 'text' : inputParam.type}
placeholder={inputParam.placeholder}
multiline={!!inputParam.rows}
@ -28,5 +29,6 @@ export const Input = ({ inputParam, value, onChange }) => {
Input.propTypes = {
inputParam: PropTypes.object,
value: PropTypes.string,
onChange: PropTypes.func
onChange: PropTypes.func,
disabled: PropTypes.bool
}

View File

@ -203,12 +203,14 @@ export const generateExportFlowData = (flowData) => {
selected: false
}
// Remove password
// Remove password, file & folder
if (node.data.inputs && Object.keys(node.data.inputs).length) {
const nodeDataInputs = {}
for (const input in node.data.inputs) {
const inputParam = node.data.inputParams.find((inp) => inp.name === input)
if (inputParam && inputParam.type === 'password') continue
if (inputParam && inputParam.type === 'file') continue
if (inputParam && inputParam.type === 'folder') continue
nodeDataInputs[input] = node.data.inputs[input]
}
newNodeData.inputs = nodeDataInputs

View File

@ -14,7 +14,7 @@ import { isValidConnection } from 'utils/genericHelper'
// ===========================|| NodeInputHandler ||=========================== //
const NodeInputHandler = ({ inputAnchor, inputParam, data }) => {
const NodeInputHandler = ({ inputAnchor, inputParam, data, disabled = false }) => {
const theme = useTheme()
const ref = useRef(null)
const updateNodeInternals = useUpdateNodeInternals()
@ -76,6 +76,7 @@ const NodeInputHandler = ({ inputAnchor, inputParam, data }) => {
</Typography>
{inputParam.type === 'file' && (
<File
disabled={disabled}
fileType={inputParam.fileType || '*'}
onChange={(newValue) => (data.inputs[inputParam.name] = newValue)}
value={data.inputs[inputParam.name] ?? inputParam.default ?? 'Choose a file to upload'}
@ -83,6 +84,7 @@ const NodeInputHandler = ({ inputAnchor, inputParam, data }) => {
)}
{(inputParam.type === 'string' || inputParam.type === 'password' || inputParam.type === 'number') && (
<Input
disabled={disabled}
inputParam={inputParam}
onChange={(newValue) => (data.inputs[inputParam.name] = newValue)}
value={data.inputs[inputParam.name] ?? inputParam.default ?? ''}
@ -90,6 +92,7 @@ const NodeInputHandler = ({ inputAnchor, inputParam, data }) => {
)}
{inputParam.type === 'options' && (
<Dropdown
disabled={disabled}
name={inputParam.name}
options={inputParam.options}
onSelect={(newValue) => (data.inputs[inputParam.name] = newValue)}
@ -106,7 +109,8 @@ const NodeInputHandler = ({ inputAnchor, inputParam, data }) => {
NodeInputHandler.propTypes = {
inputAnchor: PropTypes.object,
inputParam: PropTypes.object,
data: PropTypes.object
data: PropTypes.object,
disabled: PropTypes.bool
}
export default NodeInputHandler

View File

@ -3,7 +3,7 @@ import ReactFlow, { addEdge, Controls, Background, useNodesState, useEdgesState
import 'reactflow/dist/style.css'
import { useDispatch, useSelector } from 'react-redux'
import { useNavigate } from 'react-router-dom'
import { useNavigate, useLocation } from 'react-router-dom'
import { usePrompt } from '../../utils/usePrompt'
import {
REMOVE_DIRTY,
@ -50,6 +50,9 @@ const Canvas = () => {
const theme = useTheme()
const navigate = useNavigate()
const { state } = useLocation()
const templateFlowData = state ? state.templateFlowData : ''
const URLpath = document.location.pathname.toString().split('/')
const chatflowId = URLpath[URLpath.length - 1] === 'canvas' ? '' : URLpath[URLpath.length - 1]
@ -59,6 +62,7 @@ const Canvas = () => {
const canvas = useSelector((state) => state.canvas)
const [canvasDataStore, setCanvasDataStore] = useState(canvas)
const [chatflow, setChatflow] = useState(null)
const { reactFlowInstance, setReactFlowInstance } = useContext(flowContext)
// ==============================|| Snackbar ||============================== //
@ -437,6 +441,14 @@ const Canvas = () => {
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [])
useEffect(() => {
if (templateFlowData && templateFlowData.includes('"nodes":[') && templateFlowData.includes('],"edges":[')) {
handleLoadFlow(templateFlowData)
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [templateFlowData])
usePrompt('You have unsaved changes! Do you want to navigate away?', canvasDataStore.isDirty)
return (

View File

@ -22,6 +22,9 @@ import useApi from 'hooks/useApi'
// const
import { baseURL } from 'store/constant'
// icons
import { IconPlus } from '@tabler/icons'
// ==============================|| CHATFLOWS ||============================== //
const Chatflows = () => {
@ -83,7 +86,7 @@ const Chatflows = () => {
<Grid sx={{ mb: 1.25 }} container direction='row'>
<Box sx={{ flexGrow: 1 }} />
<Grid item>
<StyledButton variant='contained' sx={{ color: 'white' }} onClick={addNew}>
<StyledButton variant='contained' sx={{ color: 'white' }} onClick={addNew} startIcon={<IconPlus />}>
Add New
</StyledButton>
</Grid>

View File

@ -0,0 +1,105 @@
import { useEffect, useRef } from 'react'
import ReactFlow, { Controls, Background, useNodesState, useEdgesState } from 'reactflow'
import 'reactflow/dist/style.css'
import 'views/canvas/index.css'
import { useLocation, useNavigate } from 'react-router-dom'
// material-ui
import { Toolbar, Box, AppBar } from '@mui/material'
import { useTheme } from '@mui/material/styles'
// project imports
import MarketplaceCanvasNode from './MarketplaceCanvasNode'
import MarketplaceCanvasHeader from './MarketplaceCanvasHeader'
const nodeTypes = { customNode: MarketplaceCanvasNode }
const edgeTypes = { buttonedge: '' }
// ==============================|| CANVAS ||============================== //
const MarketplaceCanvas = () => {
const theme = useTheme()
const navigate = useNavigate()
const { state } = useLocation()
const { flowData, name } = state
// ==============================|| ReactFlow ||============================== //
const [nodes, setNodes, onNodesChange] = useNodesState()
const [edges, setEdges, onEdgesChange] = useEdgesState()
const reactFlowWrapper = useRef(null)
// ==============================|| useEffect ||============================== //
useEffect(() => {
if (flowData) {
const initialFlow = JSON.parse(flowData)
setNodes(initialFlow.nodes || [])
setEdges(initialFlow.edges || [])
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [flowData])
const onChatflowCopy = (flowData) => {
//navigator.clipboard.writeText(JSON.stringify(flowData))
const templateFlowData = JSON.stringify(flowData)
navigate(`/canvas`, { state: { templateFlowData } })
}
return (
<>
<Box>
<AppBar
enableColorOnDark
position='fixed'
color='inherit'
elevation={1}
sx={{
bgcolor: theme.palette.background.default
}}
>
<Toolbar>
<MarketplaceCanvasHeader
flowName={name}
flowData={JSON.parse(flowData)}
onChatflowCopy={(flowData) => onChatflowCopy(flowData)}
/>
</Toolbar>
</AppBar>
<Box sx={{ pt: '70px', height: '100vh', width: '100%' }}>
<div className='reactflow-parent-wrapper'>
<div className='reactflow-wrapper' ref={reactFlowWrapper}>
<ReactFlow
nodes={nodes}
edges={edges}
onNodesChange={onNodesChange}
onEdgesChange={onEdgesChange}
nodesDraggable={false}
nodeTypes={nodeTypes}
edgeTypes={edgeTypes}
fitView
>
<Controls
style={{
display: 'flex',
flexDirection: 'row',
left: '50%',
transform: 'translate(-50%, -50%)'
}}
/>
<Background color='#aaa' gap={16} />
</ReactFlow>
</div>
</div>
</Box>
</Box>
</>
)
}
export default MarketplaceCanvas

View File

@ -0,0 +1,76 @@
import PropTypes from 'prop-types'
import { useNavigate } from 'react-router-dom'
// material-ui
import { useTheme } from '@mui/material/styles'
import { Avatar, Box, ButtonBase, Typography, Stack } from '@mui/material'
import { StyledButton } from 'ui-component/button/StyledButton'
// icons
import { IconCopy, IconChevronLeft } from '@tabler/icons'
// ==============================|| CANVAS HEADER ||============================== //
const MarketplaceCanvasHeader = ({ flowName, flowData, onChatflowCopy }) => {
const theme = useTheme()
const navigate = useNavigate()
return (
<>
<Box>
<ButtonBase title='Back' sx={{ borderRadius: '50%' }}>
<Avatar
variant='rounded'
sx={{
...theme.typography.commonAvatar,
...theme.typography.mediumAvatar,
transition: 'all .2s ease-in-out',
background: theme.palette.secondary.light,
color: theme.palette.secondary.dark,
'&:hover': {
background: theme.palette.secondary.dark,
color: theme.palette.secondary.light
}
}}
color='inherit'
onClick={() => navigate(-1)}
>
<IconChevronLeft stroke={1.5} size='1.3rem' />
</Avatar>
</ButtonBase>
</Box>
<Box sx={{ flexGrow: 1 }}>
<Stack flexDirection='row'>
<Typography
sx={{
fontSize: '1.5rem',
fontWeight: 600,
ml: 2
}}
>
{flowName}
</Typography>
</Stack>
</Box>
<Box>
<StyledButton
color='secondary'
variant='contained'
title='Use Chatflow'
onClick={() => onChatflowCopy(flowData)}
startIcon={<IconCopy />}
>
Use Template
</StyledButton>
</Box>
</>
)
}
MarketplaceCanvasHeader.propTypes = {
flowName: PropTypes.string,
flowData: PropTypes.object,
onChatflowCopy: PropTypes.func
}
export default MarketplaceCanvasHeader

View File

@ -0,0 +1,123 @@
import PropTypes from 'prop-types'

// material-ui
import { styled, useTheme } from '@mui/material/styles'
import { Box, Typography, Divider } from '@mui/material'

// project imports
import MainCard from 'ui-component/cards/MainCard'
import NodeInputHandler from 'views/canvas/NodeInputHandler'
import NodeOutputHandler from 'views/canvas/NodeOutputHandler'

// const
import { baseURL } from 'store/constant'

// Card shell for a single node on the read-only marketplace canvas.
const CardWrapper = styled(MainCard)(({ theme }) => ({
    background: theme.palette.card.main,
    color: theme.darkTextPrimary,
    border: 'solid 1px',
    borderColor: theme.palette.primary[200] + 75,
    width: '300px',
    height: 'auto',
    padding: '10px',
    boxShadow: '0 2px 14px 0 rgb(32 40 45 / 8%)',
    '&:hover': {
        borderColor: theme.palette.primary.main
    }
}))

// ===========================|| CANVAS NODE ||=========================== //

/**
 * Read-only rendering of a chatflow node for the marketplace preview canvas.
 *
 * `data` is the node's data object from the template's flowData: it is read
 * here for `name` (icon lookup), `label`, `selected`, and the
 * `inputAnchors` / `inputParams` / `outputAnchors` arrays. Input parameter
 * widgets are rendered with `disabled` so the template cannot be edited.
 */
const MarketplaceCanvasNode = ({ data }) => {
    const theme = useTheme()

    return (
        <>
            <CardWrapper
                content={false}
                sx={{
                    padding: 0,
                    borderColor: data.selected ? theme.palette.primary.main : theme.palette.text.secondary
                }}
                border={false}
            >
                <Box>
                    {/* Node header: icon (served by the node-icon API) + label */}
                    <div style={{ display: 'flex', flexDirection: 'row', alignItems: 'center' }}>
                        <Box style={{ width: 50, marginRight: 10, padding: 5 }}>
                            <div
                                style={{
                                    ...theme.typography.commonAvatar,
                                    ...theme.typography.largeAvatar,
                                    borderRadius: '50%',
                                    backgroundColor: 'white',
                                    cursor: 'grab'
                                }}
                            >
                                <img
                                    style={{ width: '100%', height: '100%', padding: 5, objectFit: 'contain' }}
                                    src={`${baseURL}/api/v1/node-icon/${data.name}`}
                                    alt='Notification'
                                />
                            </div>
                        </Box>
                        <Box>
                            <Typography
                                sx={{
                                    fontSize: '1rem',
                                    fontWeight: 500
                                }}
                            >
                                {data.label}
                            </Typography>
                        </Box>
                    </div>
                    {/* "Inputs" section header — only when the node has any inputs */}
                    {(data.inputAnchors.length > 0 || data.inputParams.length > 0) && (
                        <>
                            <Divider />
                            <Box sx={{ background: theme.palette.asyncSelect.main, p: 1 }}>
                                <Typography
                                    sx={{
                                        fontWeight: 500,
                                        textAlign: 'center'
                                    }}
                                >
                                    Inputs
                                </Typography>
                            </Box>
                            <Divider />
                        </>
                    )}
                    {/* Connection anchors (edge targets) — rendered enabled */}
                    {data.inputAnchors.map((inputAnchor, index) => (
                        <NodeInputHandler key={index} inputAnchor={inputAnchor} data={data} />
                    ))}
                    {/* Parameter widgets — disabled: the marketplace canvas is read-only */}
                    {data.inputParams.map((inputParam, index) => (
                        <NodeInputHandler disabled={true} key={index} inputParam={inputParam} data={data} />
                    ))}
                    <Divider />
                    <Box sx={{ background: theme.palette.asyncSelect.main, p: 1 }}>
                        <Typography
                            sx={{
                                fontWeight: 500,
                                textAlign: 'center'
                            }}
                        >
                            Output
                        </Typography>
                    </Box>
                    <Divider />
                    {data.outputAnchors.map((outputAnchor, index) => (
                        <NodeOutputHandler key={index} outputAnchor={outputAnchor} data={data} />
                    ))}
                </Box>
            </CardWrapper>
        </>
    )
}

MarketplaceCanvasNode.propTypes = {
    data: PropTypes.object
}

export default MarketplaceCanvasNode

View File

@ -0,0 +1,101 @@
import { useEffect, useState } from 'react'
import { useNavigate } from 'react-router-dom'
import { useSelector } from 'react-redux'
// material-ui
import { Grid, Box, Stack } from '@mui/material'
import { useTheme } from '@mui/material/styles'
// project imports
import MainCard from 'ui-component/cards/MainCard'
import ItemCard from 'ui-component/cards/ItemCard'
import { gridSpacing } from 'store/constant'
import WorkflowEmptySVG from 'assets/images/workflow_empty.svg'
// API
import marketplacesApi from 'api/marketplaces'
// Hooks
import useApi from 'hooks/useApi'
// const
import { baseURL } from 'store/constant'
// ==============================|| Marketplace ||============================== //
const Marketplace = () => {
const navigate = useNavigate()
const theme = useTheme()
const customization = useSelector((state) => state.customization)
const [isLoading, setLoading] = useState(true)
const [images, setImages] = useState({})
const getAllMarketplacesApi = useApi(marketplacesApi.getAllMarketplaces)
const goToCanvas = (selectedChatflow) => {
navigate(`/marketplace/${selectedChatflow.id}`, { state: selectedChatflow })
}
useEffect(() => {
getAllMarketplacesApi.request()
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [])
useEffect(() => {
setLoading(getAllMarketplacesApi.loading)
}, [getAllMarketplacesApi.loading])
useEffect(() => {
if (getAllMarketplacesApi.data) {
try {
const chatflows = getAllMarketplacesApi.data
const images = {}
for (let i = 0; i < chatflows.length; i += 1) {
const flowDataStr = chatflows[i].flowData
const flowData = JSON.parse(flowDataStr)
const nodes = flowData.nodes || []
images[chatflows[i].id] = []
for (let j = 0; j < nodes.length; j += 1) {
const imageSrc = `${baseURL}/api/v1/node-icon/${nodes[j].data.name}`
if (!images[chatflows[i].id].includes(imageSrc)) {
images[chatflows[i].id].push(imageSrc)
}
}
}
setImages(images)
} catch (e) {
console.error(e)
}
}
}, [getAllMarketplacesApi.data])
return (
<MainCard sx={{ background: customization.isDarkMode ? theme.palette.common.black : '' }}>
<Stack flexDirection='row'>
<h1>Marketplace</h1>
</Stack>
<Grid container spacing={gridSpacing}>
{!isLoading &&
getAllMarketplacesApi.data &&
getAllMarketplacesApi.data.map((data, index) => (
<Grid key={index} item lg={3} md={4} sm={6} xs={12}>
<ItemCard onClick={() => goToCanvas(data)} data={data} images={images[data.id]} />
</Grid>
))}
</Grid>
{!isLoading && (!getAllMarketplacesApi.data || getAllMarketplacesApi.data.length === 0) && (
<Stack sx={{ alignItems: 'center', justifyContent: 'center' }} flexDirection='column'>
<Box sx={{ p: 2, height: 'auto' }}>
<img style={{ objectFit: 'cover', height: '30vh', width: 'auto' }} src={WorkflowEmptySVG} alt='WorkflowEmptySVG' />
</Box>
<div>No Marketplace Yet</div>
</Stack>
)}
</MainCard>
)
}
export default Marketplace