add export code functionality
parent ed2bcda91b
commit f3e8d7e89a

@ -20,8 +20,8 @@
        "clean": "npm exec -ws -- rimraf dist build",
        "format": "prettier --write \"**/*.{ts,tsx,md}\"",
        "test": "turbo run test",
-       "lint": "eslint \"**/*.{js,jsx,ts,tsx,json,md}\" && cd packages/embed && yarn lint",
-       "lint-fix": "yarn lint --fix && cd packages/embed && yarn lint-fix",
+       "lint": "eslint \"**/*.{js,jsx,ts,tsx,json,md}\"",
+       "lint-fix": "yarn lint --fix",
        "quick": "pretty-quick --staged",
        "postinstall": "husky install"
    },

@ -62,6 +62,33 @@ class ConversationalAgent_Agents implements INode {
        return result?.output
    }

    jsCodeImport(): string {
        return `import { initializeAgentExecutorWithOptions } from 'langchain/agents'`
    }

    jsCode(nodeData: INodeData): string {
        const tools = nodeData.inputs?.tools as string
        const model = nodeData.inputs?.model as string
        const memory = nodeData.inputs?.memory as string

        const code = `const input = "<your question>"
const tools = ${tools}
const model = ${model}
const memory = ${memory}

const executor = await initializeAgentExecutorWithOptions(tools, model, {
    agentType: 'chat-conversational-react-description',
    verbose: true
})
executor.memory = memory

const result = await executor.call({ input })

console.log(result)
`
        return code
    }
}

module.exports = { nodeClass: ConversationalAgent_Agents }

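For illustration, once buildLangchainCode (added further down in this diff) fills in ${tools}, ${model} and ${memory} from the upstream nodes, the assembled export for a small conversational-agent flow could look roughly like this; the Calculator, ChatOpenAI and BufferMemory values are hypothetical placeholders, not output captured from the app:

import { initializeAgentExecutorWithOptions } from 'langchain/agents'
import { ChatOpenAI } from 'langchain/chat_models/openai'
import { Calculator } from 'langchain/tools/calculator'
import { BufferMemory } from 'langchain/memory'

const input = "<your question>"
const tools = [new Calculator()]
const model = new ChatOpenAI({ temperature: 0.7, modelName: "gpt-3.5-turbo", openAIApiKey: "<your key>" })
const memory = new BufferMemory({ returnMessages: true, memoryKey: "chat_history", inputKey: "input" })

const executor = await initializeAgentExecutorWithOptions(tools, model, {
    agentType: 'chat-conversational-react-description',
    verbose: true
})
executor.memory = memory

const result = await executor.call({ input })

console.log(result)
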
@ -53,6 +53,30 @@ class MRKLAgentChat_Agents implements INode {
        return result?.output
    }

    jsCodeImport(): string {
        return `import { initializeAgentExecutorWithOptions } from 'langchain/agents'`
    }

    jsCode(nodeData: INodeData): string {
        const tools = nodeData.inputs?.tools as string
        const model = nodeData.inputs?.model as string

        const code = `const input = "<your question>"
const tools = ${tools}
const model = ${model}

const executor = await initializeAgentExecutorWithOptions(tools, model, {
    agentType: 'chat-zero-shot-react-description',
    verbose: true
})

const result = await executor.call({ input })

console.log(result)
`
        return code
    }
}

module.exports = { nodeClass: MRKLAgentChat_Agents }

@ -54,6 +54,30 @@ class MRKLAgentLLM_Agents implements INode {
        return result?.output
    }

    jsCodeImport(): string {
        return `import { initializeAgentExecutorWithOptions } from 'langchain/agents'`
    }

    jsCode(nodeData: INodeData): string {
        const tools = nodeData.inputs?.tools as string
        const model = nodeData.inputs?.model as string

        const code = `const input = "<your question>"
const tools = ${tools}
const model = ${model}

const executor = await initializeAgentExecutorWithOptions(tools, model, {
    agentType: 'zero-shot-react-description',
    verbose: true
})

const result = await executor.call({ input })

console.log(result)
`
        return code
    }
}

module.exports = { nodeClass: MRKLAgentLLM_Agents }

@ -66,6 +66,30 @@ class ConversationalRetrievalQAChain_Chains implements INode {
        return res?.text
    }

    jsCodeImport(): string {
        return `import { ConversationalRetrievalQAChain } from 'langchain/chains'`
    }

    jsCode(nodeData: INodeData): string {
        const llm = nodeData.inputs?.llm as string
        const vectorStoreRetriever = nodeData.inputs?.vectorStoreRetriever as string

        const code = `const input = "<your question>"
const chatHistory = "<your chat history>"
const llm = ${llm}
${vectorStoreRetriever}

const chain = await ConversationalRetrievalQAChain.fromLLM(llm, vectorStoreRetriever)
const result = await chain.call({
    question: input,
    chat_history: chatHistory ? chatHistory : []
})

console.log(result)
`
        return code
    }
}

module.exports = { nodeClass: ConversationalRetrievalQAChain_Chains }

@ -77,6 +77,23 @@ class ChatOpenAI_ChatModels implements INode {
        })
        return model
    }

    jsCodeImport(): string {
        return `import { ChatOpenAI } from 'langchain/chat_models/openai'`
    }

    jsCode(nodeData: INodeData): string {
        const temperature = nodeData.inputs?.temperature as string
        const modelName = nodeData.inputs?.modelName as string
        const openAIApiKey = nodeData.inputs?.openAIApiKey as string

        const code = `new ChatOpenAI({
    temperature: ${temperature},
    modelName: "${modelName}",
    openAIApiKey: "${openAIApiKey}"
})`
        return code
    }
}

module.exports = { nodeClass: ChatOpenAI_ChatModels }

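A rough usage sketch of the new method; the nodeData literal is hypothetical and trimmed to the inputs jsCode actually reads:

const nodeData = { inputs: { temperature: '0.7', modelName: 'gpt-3.5-turbo', openAIApiKey: '<your key>' } } as unknown as INodeData

const node = new ChatOpenAI_ChatModels()
console.log(node.jsCode(nodeData))
// new ChatOpenAI({
//     temperature: 0.7,
//     modelName: "gpt-3.5-turbo",
//     openAIApiKey: "<your key>"
// })
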
@ -84,6 +84,36 @@ class Pdf_DocumentLoaders implements INode {
            }
        }
    }

    jsCodeImport(): string {
        return `import { PDFLoader } from 'langchain/document_loaders/fs/pdf'`
    }

    jsCode(nodeData: INodeData): string {
        const textSplitter = nodeData.inputs?.textSplitter as TextSplitter
        const usage = nodeData.inputs?.usage as string
        const pdfFileBase64 = nodeData.inputs?.pdfFile as string

        const splitDataURI = pdfFileBase64.split(',')
        const filePath = splitDataURI.pop()
        const fileName = filePath?.split('filename:')[1] ?? ''

        if (usage === 'perFile') {
            const code = `const loader = new PDFLoader("${fileName}", { splitPages: false })`
            if (textSplitter) {
                return `${code}\nconst docs = await loader.loadAndSplit(${textSplitter})`
            } else {
                return `${code}\nconst docs = await loader.load()`
            }
        } else {
            const code = `const loader = new PDFLoader("${fileName}")`
            if (textSplitter) {
                return `${code}\nconst docs = await loader.loadAndSplit(${textSplitter})`
            } else {
                return `${code}\nconst docs = await loader.load()`
            }
        }
    }
}

module.exports = { nodeClass: Pdf_DocumentLoaders }

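For context, the pdfFile input appears to be a data URI with the original file name appended after the last comma, which is what the parsing above relies on; the sample value below is made up:

const pdfFileBase64 = 'data:application/pdf;base64,JVBERi0xLjQK...,filename:example.pdf'

const splitDataURI = pdfFileBase64.split(',')           // ['data:application/pdf;base64', 'JVBERi0xLjQK...', 'filename:example.pdf']
const filePath = splitDataURI.pop()                     // 'filename:example.pdf'
const fileName = filePath?.split('filename:')[1] ?? ''  // 'example.pdf'
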
@ -35,6 +35,16 @@ class CohereEmbedding_Embeddings implements INode {
        const model = new CohereEmbeddings({ apiKey })
        return model
    }

    jsCodeImport(): string {
        return `import { CohereEmbeddings } from 'langchain/embeddings/cohere'`
    }

    jsCode(nodeData: INodeData): string {
        const apiKey = nodeData.inputs?.cohereApiKey as string
        const code = `new CohereEmbeddings({ apiKey: "${apiKey}" })`
        return code
    }
}

module.exports = { nodeClass: CohereEmbedding_Embeddings }

@ -35,6 +35,16 @@ class OpenAIEmbedding_Embeddings implements INode {
        const model = new OpenAIEmbeddings({ openAIApiKey })
        return model
    }

    jsCodeImport(): string {
        return `import { OpenAIEmbeddings } from 'langchain/embeddings/openai'`
    }

    jsCode(nodeData: INodeData): string {
        const openAIApiKey = nodeData.inputs?.openAIApiKey as string
        const code = `new OpenAIEmbeddings({ openAIApiKey: "${openAIApiKey}" })`
        return code
    }
}

module.exports = { nodeClass: OpenAIEmbedding_Embeddings }

@ -45,6 +45,21 @@ class HuggingFaceInference_LLMs implements INode {
        })
        return huggingFace
    }

    jsCodeImport(): string {
        return `import { HuggingFaceInference } from 'langchain/llms/hf'`
    }

    jsCode(nodeData: INodeData): string {
        const model = nodeData.inputs?.model as string
        const apiKey = nodeData.inputs?.apiKey as string

        const code = `new HuggingFaceInference({
    model: ${model},
    apiKey: "${apiKey}"
})`
        return code
    }
}

module.exports = { nodeClass: HuggingFaceInference_LLMs }

@ -73,6 +73,23 @@ class OpenAI_LLMs implements INode {
        })
        return model
    }

    jsCodeImport(): string {
        return `import { OpenAI } from 'langchain/llms/openai'`
    }

    jsCode(nodeData: INodeData): string {
        const temperature = nodeData.inputs?.temperature as string
        const modelName = nodeData.inputs?.modelName as string
        const openAIApiKey = nodeData.inputs?.openAIApiKey as string

        const code = `new OpenAI({
    temperature: ${temperature},
    modelName: "${modelName}",
    openAIApiKey: "${openAIApiKey}"
})`
        return code
    }
}

module.exports = { nodeClass: OpenAI_LLMs }

@ -45,6 +45,22 @@ class BufferMemory_Memory implements INode {
            inputKey
        })
    }

    jsCodeImport(): string {
        return `import { BufferMemory } from 'langchain/memory'`
    }

    jsCode(nodeData: INodeData): string {
        const memoryKey = nodeData.inputs?.memoryKey as string
        const inputKey = nodeData.inputs?.inputKey as string

        const code = `new BufferMemory({
    returnMessages: true,
    memoryKey: "${memoryKey}",
    inputKey: "${inputKey}"
})`
        return code
    }
}

module.exports = { nodeClass: BufferMemory_Memory }

@ -50,6 +50,23 @@ class RecursiveCharacterTextSplitter_TextSplitters implements INode {
        return splitter
    }

    jsCodeImport(): string {
        return `import { RecursiveCharacterTextSplitter } from 'langchain/text_splitter'`
    }

    jsCode(nodeData: INodeData): string {
        const chunkSize = nodeData.inputs?.chunkSize as string
        const chunkOverlap = nodeData.inputs?.chunkOverlap as string

        const obj = {} as RecursiveCharacterTextSplitterParams

        if (chunkSize) obj.chunkSize = parseInt(chunkSize, 10)
        if (chunkOverlap) obj.chunkOverlap = parseInt(chunkOverlap, 10)

        const code = `new RecursiveCharacterTextSplitter(${JSON.stringify(obj)})`
        return code
    }
}

module.exports = { nodeClass: RecursiveCharacterTextSplitter_TextSplitters }

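Because the options object is serialized with JSON.stringify, the generated snippet carries JSON-style quoted keys; with hypothetical inputs of '1000' and '200' the returned string would be:

new RecursiveCharacterTextSplitter({"chunkSize":1000,"chunkOverlap":200})
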
@ -36,6 +36,17 @@ class AIPlugin implements INode {
        return aiplugin
    }

    jsCodeImport(): string {
        return `import { AIPluginTool } from 'langchain/tools'`
    }

    jsCode(nodeData: INodeData): string {
        const pluginUrl = nodeData.inputs?.pluginUrl as string

        const code = `await AIPluginTool.fromPluginUrl("${pluginUrl}")`
        return code
    }
}

module.exports = { nodeClass: AIPlugin }

@ -24,6 +24,15 @@ class Calculator_Tools implements INode {
    async init(): Promise<any> {
        return new Calculator()
    }

    jsCodeImport(): string {
        return `import { Calculator } from 'langchain/tools/calculator'`
    }

    jsCode(): string {
        const code = `new Calculator()`
        return code
    }
}

module.exports = { nodeClass: Calculator_Tools }

@ -33,6 +33,15 @@ class SerpAPI_Tools implements INode {
        const apiKey = nodeData.inputs?.apiKey as string
        return new SerpAPI(apiKey)
    }

    jsCodeImport(): string {
        return `import { SerpAPI } from 'langchain/tools'`
    }

    jsCode(): string {
        const code = `new SerpAPI()`
        return code
    }
}

module.exports = { nodeClass: SerpAPI_Tools }

@ -65,6 +65,26 @@ class Chroma_Existing_VectorStores implements INode {
        }
        return vectorStore
    }

    jsCodeImport(): string {
        return `import { Chroma } from 'langchain/vectorstores/chroma'`
    }

    jsCode(nodeData: INodeData): string {
        const collectionName = nodeData.inputs?.collectionName as string
        const embeddings = nodeData.inputs?.embeddings as Embeddings
        const output = nodeData.outputs?.output as string

        const code = `const embeddings = ${embeddings}

const vectorStore = await Chroma.fromExistingCollection(embeddings, {
    collectionName: "${collectionName}"
})`
        if (output === 'retriever') {
            return `${code}\nconst vectorStoreRetriever = vectorStore.asRetriever()`
        }
        return code
    }
}

module.exports = { nodeClass: Chroma_Existing_VectorStores }

@ -77,6 +77,29 @@ class ChromaUpsert_VectorStores implements INode {
        }
        return vectorStore
    }

    jsCodeImport(): string {
        return `import { Chroma } from 'langchain/vectorstores/chroma'`
    }

    jsCode(nodeData: INodeData): string {
        const collectionName = nodeData.inputs?.collectionName as string
        const docs = nodeData.inputs?.document as string
        const embeddings = nodeData.inputs?.embeddings as Embeddings
        const output = nodeData.outputs?.output as string

        const code = `${docs}

const embeddings = ${embeddings}

const vectorStore = await Chroma.fromDocuments(docs, embeddings, {
    collectionName: "${collectionName}"
})`
        if (output === 'retriever') {
            return `${code}\nconst vectorStoreRetriever = vectorStore.asRetriever()`
        }
        return code
    }
}

module.exports = { nodeClass: ChromaUpsert_VectorStores }

@ -86,6 +86,37 @@ class Pinecone_Existing_VectorStores implements INode {
        }
        return vectorStore
    }

    jsCodeImport(): string {
        return `import { PineconeClient } from '@pinecone-database/pinecone'
import { PineconeStore } from 'langchain/vectorstores/pinecone'`
    }

    jsCode(nodeData: INodeData): string {
        const pineconeApiKey = nodeData.inputs?.pineconeApiKey as string
        const pineconeEnv = nodeData.inputs?.pineconeEnv as string
        const index = nodeData.inputs?.pineconeIndex as string
        const embeddings = nodeData.inputs?.embeddings as Embeddings
        const output = nodeData.outputs?.output as string

        const code = `const client = new PineconeClient()
await client.init({
    apiKey: "${pineconeApiKey}",
    environment: "${pineconeEnv}"
})

const pineconeIndex = client.Index("${index}")

const embeddings = ${embeddings}

const vectorStore = await PineconeStore.fromExistingIndex(embeddings, {
    pineconeIndex
})`
        if (output === 'retriever') {
            return `${code}\nconst vectorStoreRetriever = vectorStore.asRetriever()`
        }
        return code
    }
}

module.exports = { nodeClass: Pinecone_Existing_VectorStores }

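By the time jsCode runs here, resolveJsCode (see the utils change below) has already replaced ${embeddings} with the snippet produced by the upstream embeddings node, so for a hypothetical OpenAI embeddings input the tail of the exported program would read roughly:

const embeddings = new OpenAIEmbeddings({ openAIApiKey: "<your key>" })

const vectorStore = await PineconeStore.fromExistingIndex(embeddings, {
    pineconeIndex
})
const vectorStoreRetriever = vectorStore.asRetriever()
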
@ -98,6 +98,40 @@ class PineconeUpsert_VectorStores implements INode {
        }
        return vectorStore
    }

    jsCodeImport(): string {
        return `import { PineconeClient } from '@pinecone-database/pinecone'
import { PineconeStore } from 'langchain/vectorstores/pinecone'`
    }

    jsCode(nodeData: INodeData): string {
        const pineconeApiKey = nodeData.inputs?.pineconeApiKey as string
        const pineconeEnv = nodeData.inputs?.pineconeEnv as string
        const index = nodeData.inputs?.pineconeIndex as string
        const docs = nodeData.inputs?.document as string
        const embeddings = nodeData.inputs?.embeddings as Embeddings
        const output = nodeData.outputs?.output as string

        const code = `const client = new PineconeClient()
await client.init({
    apiKey: "${pineconeApiKey}",
    environment: "${pineconeEnv}"
})

const pineconeIndex = client.Index("${index}")

${docs}

const embeddings = ${embeddings}

const vectorStore = await PineconeStore.fromDocuments(docs, embeddings, {
    pineconeIndex
})`
        if (output === 'retriever') {
            return `${code}\nconst vectorStoreRetriever = vectorStore.asRetriever()`
        }
        return code
    }
}

module.exports = { nodeClass: PineconeUpsert_VectorStores }

@ -75,6 +75,8 @@ export interface INode extends INodeProperties {
    output?: INodeOutputsValue[]
    init?(nodeData: INodeData, input: string, options?: ICommonObject): Promise<any>
    run?(nodeData: INodeData, input: string, options?: ICommonObject): Promise<string>
    jsCodeImport?(): string
    jsCode?(nodeData: INodeData, input: string, options?: ICommonObject): string
}

export interface INodeData extends INodeProperties {

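Both new members are optional, so existing nodes compile unchanged; the export builder (buildLangchainCode, further down in this diff) simply feature-detects them, roughly along these lines:

const newNodeInstance = new nodeModule.nodeClass()

if (newNodeInstance.jsCodeImport) {
    finalImports += newNodeInstance.jsCodeImport() + '\n'
}
if (newNodeInstance.jsCode) {
    finalCode = newNodeInstance.jsCode(reactFlowNodeData)
}
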
@ -64,8 +64,6 @@ export const Bot = (props: BotProps) => {
            }
        })

-       console.log(data)
-
        if (data) {
            setMessages((prevMessages) => [...prevMessages, { message: data, type: 'apiMessage' }])
            setLoading(false)

@ -12,7 +12,8 @@ import {
    getEndingNode,
    constructGraphs,
    resolveVariables,
-   isStartNodeDependOnInput
+   isStartNodeDependOnInput,
+   buildLangchainCode
} from './utils'
import { cloneDeep } from 'lodash'
import { getDataSource } from './DataSource'

@ -284,6 +285,56 @@ export class App {
            }
        })

        this.app.get('/api/v1/exportcode/:id', async (req: Request, res: Response) => {
            try {
                const chatflowid = req.params.id

                /*** Get chatflows and prepare data ***/
                const chatflow = await this.AppDataSource.getRepository(ChatFlow).findOneBy({
                    id: chatflowid
                })
                if (!chatflow) return res.status(404).send(`Chatflow ${chatflowid} not found`)

                const flowData = chatflow.flowData
                const parsedFlowData: IReactFlowObject = JSON.parse(flowData)
                const nodes = parsedFlowData.nodes
                const edges = parsedFlowData.edges

                /*** Get Ending Node with Directed Graph ***/
                const { graph, nodeDependencies } = constructGraphs(nodes, edges)
                const directedGraph = graph
                const endingNodeId = getEndingNode(nodeDependencies, directedGraph)
                if (!endingNodeId) return res.status(500).send(`Ending node must be either a Chain or Agent`)

                const endingNodeData = nodes.find((nd) => nd.id === endingNodeId)?.data
                if (!endingNodeData) return res.status(500).send(`Ending node must be either a Chain or Agent`)

                if (
                    endingNodeData.outputs &&
                    Object.keys(endingNodeData.outputs).length &&
                    !Object.values(endingNodeData.outputs).includes(endingNodeData.name)
                ) {
                    return res
                        .status(500)
                        .send(
                            `Output of ${endingNodeData.label} (${endingNodeData.id}) must be ${endingNodeData.label}, can't be an Output Prediction`
                        )
                }

                /*** Get Starting Nodes with Non-Directed Graph ***/
                const constructedObj = constructGraphs(nodes, edges, true)
                const nonDirectedGraph = constructedObj.graph
                const { startingNodeIds, depthQueue } = getStartingNodes(nonDirectedGraph, endingNodeId)

                /*** BFS to traverse from Starting Nodes to Ending Node ***/
                const finalCode = await buildLangchainCode(startingNodeIds, nodes, graph, depthQueue, this.nodesPool.componentNodes)

                return res.send(finalCode)
            } catch (e: any) {
                return res.status(500).send(e.message)
            }
        })

        // ----------------------------------------
        // Marketplaces
        // ----------------------------------------

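The new route is a plain GET that returns the generated program as text, so it can be exercised directly once the server is up; the base URL and chatflow id below are placeholders:

const response = await fetch('http://localhost:3000/api/v1/exportcode/<chatflow-id>')
const exportedCode = await response.text()
console.log(exportedCode)
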
@ -362,3 +362,202 @@ export const isStartNodeDependOnInput = (startingNodes: IReactFlowNode[]): boole
    }
    return false
}

/**
 * Build langchain from start to end
 * @param {string[]} startingNodeIds
 * @param {IReactFlowNode[]} reactFlowNodes
 * @param {INodeDirectedGraph} graph
 * @param {IDepthQueue} depthQueue
 * @param {IComponentNodes} componentNodes
 */
export const buildLangchainCode = async (
    startingNodeIds: string[],
    reactFlowNodes: IReactFlowNode[],
    graph: INodeDirectedGraph,
    depthQueue: IDepthQueue,
    componentNodes: IComponentNodes
) => {
    const flowNodes = cloneDeep(reactFlowNodes)
    const jsCodeReference: ICommonObject = {}
    let finalCode = ''
    let finalImports = ''

    // Create a Queue and add our initial node in it
    const nodeQueue = [] as INodeQueue[]
    const exploredNode = {} as IExploredNode

    // In the case of infinite loop, only max 3 loops will be executed
    const maxLoop = 3

    for (let i = 0; i < startingNodeIds.length; i += 1) {
        nodeQueue.push({ nodeId: startingNodeIds[i], depth: 0 })
        exploredNode[startingNodeIds[i]] = { remainingLoop: maxLoop, lastSeenDepth: 0 }
    }

    while (nodeQueue.length) {
        const { nodeId, depth } = nodeQueue.shift() as INodeQueue

        const reactFlowNode = flowNodes.find((nd) => nd.id === nodeId)
        const nodeIndex = flowNodes.findIndex((nd) => nd.id === nodeId)
        if (!reactFlowNode || reactFlowNode === undefined || nodeIndex < 0) continue

        try {
            const nodeInstanceFilePath = componentNodes[reactFlowNode.data.name].filePath as string
            const nodeModule = await import(nodeInstanceFilePath)
            const newNodeInstance = new nodeModule.nodeClass()

            const reactFlowNodeData: INodeData = resolveJsCode(reactFlowNode.data, flowNodes, jsCodeReference)

            if (newNodeInstance.jsCodeImport) {
                finalImports += newNodeInstance.jsCodeImport(reactFlowNodeData) + `\n`
            }

            if (newNodeInstance.jsCode) {
                jsCodeReference[reactFlowNodeData.id] = newNodeInstance.jsCode(reactFlowNodeData)
                finalCode = newNodeInstance.jsCode(reactFlowNodeData)
            }
        } catch (e: any) {
            console.error(e)
            throw new Error(e)
        }

        const neighbourNodeIds = graph[nodeId]
        const nextDepth = depth + 1

        // Find other nodes that are on the same depth level
        const sameDepthNodeIds = Object.keys(depthQueue).filter((key) => depthQueue[key] === nextDepth)

        for (const id of sameDepthNodeIds) {
            if (neighbourNodeIds.includes(id)) continue
            neighbourNodeIds.push(id)
        }

        for (let i = 0; i < neighbourNodeIds.length; i += 1) {
            const neighNodeId = neighbourNodeIds[i]

            // If nodeId has been seen, cycle detected
            if (Object.prototype.hasOwnProperty.call(exploredNode, neighNodeId)) {
                const { remainingLoop, lastSeenDepth } = exploredNode[neighNodeId]

                if (lastSeenDepth === nextDepth) continue

                if (remainingLoop === 0) {
                    break
                }
                const remainingLoopMinusOne = remainingLoop - 1
                exploredNode[neighNodeId] = { remainingLoop: remainingLoopMinusOne, lastSeenDepth: nextDepth }
                nodeQueue.push({ nodeId: neighNodeId, depth: nextDepth })
            } else {
                exploredNode[neighNodeId] = { remainingLoop: maxLoop, lastSeenDepth: nextDepth }
                nodeQueue.push({ nodeId: neighNodeId, depth: nextDepth })
            }
        }
    }
    return `${finalImports}\n${finalCode}`
}

/**
 * Resolve only for isAcceptVariable
 * @param {string} paramValue
 * @param {IReactFlowNode[]} reactFlowNodes
 * @param {ICommonObject} jsCodeReference
 * @param {boolean} isAcceptVariable
 * @returns {string}
 */
export const getJsCode = (
    paramValue: string,
    reactFlowNodes: IReactFlowNode[],
    jsCodeReference: ICommonObject,
    isAcceptVariable = false
) => {
    let returnVal = paramValue
    const variableStack = []
    const variableDict = {} as IVariableDict
    let startIdx = 0
    const endIdx = returnVal.length - 1

    while (startIdx < endIdx) {
        const substr = returnVal.substring(startIdx, startIdx + 2)

        // Store the opening double curly bracket
        if (substr === '{{') {
            variableStack.push({ substr, startIdx: startIdx + 2 })
        }

        // Found the complete variable
        if (substr === '}}' && variableStack.length > 0 && variableStack[variableStack.length - 1].substr === '{{') {
            const variableStartIdx = variableStack[variableStack.length - 1].startIdx
            const variableEndIdx = startIdx
            const variableFullPath = returnVal.substring(variableStartIdx, variableEndIdx)

            if (isAcceptVariable && variableFullPath === QUESTION_VAR_PREFIX) {
                variableDict[`{{${variableFullPath}}}`] = `<your question>`
            }

            // Split by first occurrence of '.' to get just nodeId
            const [variableNodeId, _] = variableFullPath.split('.')
            const executedNode = reactFlowNodes.find((nd) => nd.id === variableNodeId)
            if (executedNode) {
                if (isAcceptVariable) {
                    variableDict[`{{${variableFullPath}}}`] = `<replace with output from ${variableNodeId}>`
                } else {
                    const variableValue = get(jsCodeReference, variableNodeId)
                    returnVal = variableValue
                }
            }
            variableStack.pop()
        }
        startIdx += 1
    }

    if (isAcceptVariable) {
        const variablePaths = Object.keys(variableDict)
        variablePaths.sort() // Sort by length of variable path because longer path could possibly contain nested variable
        variablePaths.forEach((path) => {
            const variableValue = variableDict[path]
            // Replace all occurrences
            returnVal = returnVal.split(path).join(variableValue)
        })
        return returnVal
    }
    return returnVal
}

/**
 * Loop through each input and resolve variables if necessary
 * @param {INodeData} reactFlowNodeData
 * @param {IReactFlowNode[]} reactFlowNodes
 * @param {ICommonObject} jsCodeReference
 * @returns {INodeData}
 */
export const resolveJsCode = (
    reactFlowNodeData: INodeData,
    reactFlowNodes: IReactFlowNode[],
    jsCodeReference: ICommonObject
): INodeData => {
    const flowNodeData = cloneDeep(reactFlowNodeData)
    const types = 'inputs'

    const getParamValues = (paramsObj: ICommonObject) => {
        for (const key in paramsObj) {
            const paramValue: string = paramsObj[key]
            if (Array.isArray(paramValue)) {
                const resolvedCodes = []
                for (const param of paramValue) {
                    const code = getJsCode(param, reactFlowNodes, jsCodeReference)
                    resolvedCodes.push(code)
                }
                paramsObj[key] = `[${resolvedCodes}]`
            } else {
                const isAcceptVariable = reactFlowNodeData.inputParams.find((param) => param.name === key)?.acceptVariable ?? false
                const resolvedCodes = getJsCode(paramValue, reactFlowNodes, jsCodeReference, isAcceptVariable)
                paramsObj[key] = resolvedCodes
            }
        }
    }

    const paramsObj = (flowNodeData as any)[types]

    getParamValues(paramsObj)

    return flowNodeData
}

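To make the substitution concrete, a rough sketch of getJsCode's two modes; the node id, the stored snippet and the assumption that QUESTION_VAR_PREFIX equals 'question' are all illustrative:

const jsCodeReference: ICommonObject = { chatOpenAI_0: `new ChatOpenAI({ temperature: 0.7 })` }

// Default mode: a {{nodeId.data.instance}} reference becomes that node's generated snippet
// (assumes reactFlowNodes contains a node with id 'chatOpenAI_0')
getJsCode('{{chatOpenAI_0.data.instance}}', reactFlowNodes, jsCodeReference)
// -> 'new ChatOpenAI({ temperature: 0.7 })'

// isAcceptVariable mode: free-text variables become readable placeholders instead
getJsCode('{{question}}', reactFlowNodes, jsCodeReference, true)
// -> '<your question>'
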
@ -10,10 +10,13 @@ const updateChatflow = (id, body) => client.put(`/chatflows/${id}`, body)
const deleteChatflow = (id) => client.delete(`/chatflows/${id}`)

+const exportCode = (id) => client.get(`/exportcode/${id}`)
+
export default {
    getAllChatflows,
    getSpecificChatflow,
    createNewChatflow,
    updateChatflow,
-   deleteChatflow
+   deleteChatflow,
+   exportCode
}

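On the client side, fetching the export then mirrors the CanvasHeader change further down; the id is a placeholder:

const response = await chatflowsApi.exportCode('<chatflow-id>')
const exportedCode = response.data // imports plus the assembled LangChain snippet, as plain text
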
@ -40,7 +40,7 @@ function a11yProps(index) {
const APICodeDialog = ({ show, dialogProps, onCancel }) => {
    const portalElement = document.getElementById('portal')
-   const codes = ['Embed', 'Python', 'JavaScript', 'cURL']
+   const codes = ['Embed', 'Python API', 'JavaScript API', 'JavaScript Code', 'cURL']
    const [value, setValue] = useState(0)

    const handleChange = (event, newValue) => {

@ -48,7 +48,7 @@ const APICodeDialog = ({ show, dialogProps, onCancel }) => {
    }

    const getCode = (codeLang) => {
-       if (codeLang === 'Python') {
+       if (codeLang === 'Python API') {
            return `import requests

API_URL = "${baseURL}/api/v1/prediction/${dialogProps.chatflowid}"

@ -61,7 +61,7 @@ output = query({
    "question": "Hey, how are you?",
})
`
-       } else if (codeLang === 'JavaScript') {
+       } else if (codeLang === 'JavaScript API') {
            return `async function query(data) {
    const response = await fetch(
        "${baseURL}/api/v1/prediction/${dialogProps.chatflowid}",

@ -89,14 +89,16 @@ output = query({
            return `curl ${baseURL}/api/v1/prediction/${dialogProps.chatflowid} \\
     -X POST \\
     -d '{"question": "Hey, how are you?"}'`
+       } else if (codeLang === 'JavaScript Code') {
+           return dialogProps ? dialogProps.exportedCode : ''
        }
        return ''
    }

    const getLang = (codeLang) => {
-       if (codeLang === 'Python') {
+       if (codeLang === 'Python API') {
            return 'python'
-       } else if (codeLang === 'JavaScript' || codeLang === 'Embed') {
+       } else if (codeLang === 'JavaScript API' || codeLang === 'Embed' || codeLang === 'JavaScript Code') {
            return 'javascript'
        } else if (codeLang === 'cURL') {
            return 'bash'

@ -105,9 +107,9 @@ output = query({
    }

    const getSVG = (codeLang) => {
-       if (codeLang === 'Python') {
+       if (codeLang === 'Python API') {
            return pythonSVG
-       } else if (codeLang === 'JavaScript') {
+       } else if (codeLang === 'JavaScript API' || codeLang === 'JavaScript Code') {
            return javascriptSVG
        } else if (codeLang === 'Embed') {
            return EmbedSVG

@ -8,7 +8,7 @@ import { useTheme } from '@mui/material/styles'
import { Avatar, Box, ButtonBase, Typography, Stack, TextField } from '@mui/material'

// icons
-import { IconSettings, IconChevronLeft, IconDeviceFloppy, IconPencil, IconCheck, IconX, IconWorldWww } from '@tabler/icons'
+import { IconSettings, IconChevronLeft, IconDeviceFloppy, IconPencil, IconCheck, IconX, IconCode } from '@tabler/icons'

// project imports
import Settings from 'views/settings'

@ -79,10 +79,18 @@ const CanvasHeader = ({ chatflow, handleSaveFlow, handleDeleteFlow, handleLoadFl
        }
    }

-   const onAPIDialogClick = () => {
+   const onAPIDialogClick = async () => {
+       let exportedCode = ''
+       try {
+           const response = await chatflowsApi.exportCode(chatflow.id)
+           exportedCode = response.data
+       } catch (error) {
+           console.error(error)
+       }
        setAPIDialogProps({
-           title: 'Embed in your application or use as API',
-           chatflowid: chatflow.id
+           title: 'Embed in your application, use as API, or export as code',
+           chatflowid: chatflow.id,
+           exportedCode
        })
        setAPIDialogOpen(true)
    }

@ -248,7 +256,7 @@ const CanvasHeader = ({ chatflow, handleSaveFlow, handleDeleteFlow, handleLoadFl
                            color='inherit'
                            onClick={onAPIDialogClick}
                        >
-                           <IconWorldWww stroke={1.5} size='1.3rem' />
+                           <IconCode stroke={1.5} size='1.3rem' />
                        </Avatar>
                    </ButtonBase>
                )}