Merge branch 'main' into feature/env-vars

Henry 2023-12-21 13:38:22 +00:00
commit a2d4a3b8d0
8 changed files with 126 additions and 33 deletions

View File

@@ -1,6 +1,6 @@
 {
     "name": "flowise",
-    "version": "1.4.6",
+    "version": "1.4.7",
     "private": true,
     "homepage": "https://flowiseai.com",
     "workspaces": [

View File

@@ -148,10 +148,10 @@ class Redis_VectorStores implements INode {
             }
         }
 
+        const redisClient = createClient({ url: redisUrl })
+        await redisClient.connect()
+
         try {
-            const redisClient = createClient({ url: redisUrl })
-            await redisClient.connect()
-
             const storeConfig: RedisVectorStoreConfig = {
                 redisClient: redisClient,
                 indexName: indexName

View File

@@ -1,6 +1,6 @@
 {
     "name": "flowise-components",
-    "version": "1.4.8",
+    "version": "1.4.9",
     "description": "Flowiseai Components",
     "main": "dist/src/index",
     "types": "dist/src/index.d.ts",

View File

@@ -54,7 +54,7 @@
                 "inputs": {
                     "name": "ai-paper-qa",
                     "description": "AI Paper QA - useful for when you need to ask questions about the AI-Generated Content paper.",
-                    "returnDirect": "",
+                    "returnDirect": true,
                     "baseChain": "{{retrievalQAChain_0.data.instance}}"
                 },
                 "outputAnchors": [
@@ -128,7 +128,7 @@
                 "inputs": {
                     "name": "state-of-union-qa",
                     "description": "State of the Union QA - useful for when you need to ask questions about the president speech and most recent state of the union address.",
-                    "returnDirect": "",
+                    "returnDirect": true,
                     "baseChain": "{{retrievalQAChain_1.data.instance}}"
                 },
                 "outputAnchors": [

View File

@@ -1,6 +1,6 @@
 {
     "name": "flowise",
-    "version": "1.4.6",
+    "version": "1.4.7",
     "description": "Flowiseai Server",
     "main": "dist/index",
     "types": "dist/index.d.ts",

View File

@@ -21,7 +21,8 @@ import {
     chatType,
     IChatMessage,
     IReactFlowEdge,
-    IDepthQueue
+    IDepthQueue,
+    INodeDirectedGraph
 } from './Interface'
 import {
     getNodeModulesPackagePath,
@@ -44,7 +45,8 @@
     checkMemorySessionId,
     clearSessionMemoryFromViewMessageDialog,
     getUserHome,
-    replaceChatHistory
+    replaceChatHistory,
+    getAllConnectedNodes
 } from './utils'
 import { cloneDeep, omit, uniqWith, isEqual } from 'lodash'
 import { getDataSource } from './DataSource'
@@ -1092,12 +1094,12 @@ export class App {
             upload.array('files'),
             (req: Request, res: Response, next: NextFunction) => getRateLimiter(req, res, next),
             async (req: Request, res: Response) => {
-                await this.buildChatflow(req, res, undefined, false, true)
+                await this.upsertVector(req, res)
             }
         )
 
         this.app.post('/api/v1/vector/internal-upsert/:id', async (req: Request, res: Response) => {
-            await this.buildChatflow(req, res, undefined, true, true)
+            await this.upsertVector(req, res, true)
         })
 
         // ----------------------------------------
@@ -1415,6 +1417,110 @@ export class App {
         return undefined
     }
 
+    async upsertVector(req: Request, res: Response, isInternal: boolean = false) {
+        try {
+            const chatflowid = req.params.id
+            let incomingInput: IncomingInput = req.body
+
+            const chatflow = await this.AppDataSource.getRepository(ChatFlow).findOneBy({
+                id: chatflowid
+            })
+            if (!chatflow) return res.status(404).send(`Chatflow ${chatflowid} not found`)
+
+            if (!isInternal) {
+                const isKeyValidated = await this.validateKey(req, chatflow)
+                if (!isKeyValidated) return res.status(401).send('Unauthorized')
+            }
+
+            const files = (req.files as any[]) || []
+
+            if (files.length) {
+                const overrideConfig: ICommonObject = { ...req.body }
+                for (const file of files) {
+                    const fileData = fs.readFileSync(file.path, { encoding: 'base64' })
+                    const dataBase64String = `data:${file.mimetype};base64,${fileData},filename:${file.filename}`
+
+                    const fileInputField = mapMimeTypeToInputField(file.mimetype)
+                    if (overrideConfig[fileInputField]) {
+                        overrideConfig[fileInputField] = JSON.stringify([...JSON.parse(overrideConfig[fileInputField]), dataBase64String])
+                    } else {
+                        overrideConfig[fileInputField] = JSON.stringify([dataBase64String])
+                    }
+                }
+                incomingInput = {
+                    question: req.body.question ?? 'hello',
+                    overrideConfig,
+                    history: [],
+                    stopNodeId: req.body.stopNodeId
+                }
+            }
+
+            /*** Get chatflows and prepare data ***/
+            const flowData = chatflow.flowData
+            const parsedFlowData: IReactFlowObject = JSON.parse(flowData)
+            const nodes = parsedFlowData.nodes
+            const edges = parsedFlowData.edges
+
+            let stopNodeId = incomingInput?.stopNodeId ?? ''
+            let chatHistory = incomingInput?.history
+            let chatId = incomingInput.chatId ?? ''
+            let isUpsert = true
+
+            const vsNodes = nodes.filter(
+                (node) =>
+                    node.data.category === 'Vector Stores' &&
+                    !node.data.label.includes('Upsert') &&
+                    !node.data.label.includes('Load Existing')
+            )
+            if (vsNodes.length > 1 && !stopNodeId) {
+                return res.status(500).send('There are multiple vector nodes, please provide stopNodeId in body request')
+            } else if (vsNodes.length === 1 && !stopNodeId) {
+                stopNodeId = vsNodes[0].data.id
+            } else if (!vsNodes.length && !stopNodeId) {
+                return res.status(500).send('No vector node found')
+            }
+
+            const { graph } = constructGraphs(nodes, edges, { isReversed: true })
+
+            const nodeIds = getAllConnectedNodes(graph, stopNodeId)
+
+            const filteredGraph: INodeDirectedGraph = {}
+            for (const key of nodeIds) {
+                if (Object.prototype.hasOwnProperty.call(graph, key)) {
+                    filteredGraph[key] = graph[key]
+                }
+            }
+
+            const { startingNodeIds, depthQueue } = getStartingNodes(filteredGraph, stopNodeId)
+
+            await buildLangchain(
+                startingNodeIds,
+                nodes,
+                edges,
+                filteredGraph,
+                depthQueue,
+                this.nodesPool.componentNodes,
+                incomingInput.question,
+                chatHistory,
+                chatId,
+                chatflowid,
+                this.AppDataSource,
+                incomingInput?.overrideConfig,
+                this.cachePool,
+                isUpsert,
+                stopNodeId
+            )
+
+            const startingNodes = nodes.filter((nd) => startingNodeIds.includes(nd.data.id))
+
+            this.chatflowPool.add(chatflowid, undefined, startingNodes, incomingInput?.overrideConfig)
+            return res.status(201).send('Successfully Upserted')
+        } catch (e: any) {
+            logger.error('[server]: Error:', e)
+            return res.status(500).send(e.message)
+        }
+    }
+
     /**
      * Build Chatflow
      * @param {Request} req
@@ -1423,7 +1529,7 @@ export class App {
      * @param {boolean} isInternal
      * @param {boolean} isUpsert
      */
-    async buildChatflow(req: Request, res: Response, socketIO?: Server, isInternal: boolean = false, isUpsert: boolean = false) {
+    async buildChatflow(req: Request, res: Response, socketIO?: Server, isInternal: boolean = false) {
         try {
             const chatflowid = req.params.id
             let incomingInput: IncomingInput = req.body
@@ -1464,8 +1570,7 @@ export class App {
                     question: req.body.question ?? 'hello',
                     overrideConfig,
                     history: [],
-                    socketIOClientId: req.body.socketIOClientId,
-                    stopNodeId: req.body.stopNodeId
+                    socketIOClientId: req.body.socketIOClientId
                 }
             }
 
@@ -1493,8 +1598,7 @@ export class App {
                         this.chatflowPool.activeChatflows[chatflowid].overrideConfig,
                         incomingInput.overrideConfig
                     ) &&
-                    !isStartNodeDependOnInput(this.chatflowPool.activeChatflows[chatflowid].startingNodes, nodes) &&
-                    !isUpsert
+                    !isStartNodeDependOnInput(this.chatflowPool.activeChatflows[chatflowid].startingNodes, nodes)
                 )
             }
 
@@ -1523,8 +1627,7 @@ export class App {
             if (
                 endingNodeData.outputs &&
                 Object.keys(endingNodeData.outputs).length &&
-                !Object.values(endingNodeData.outputs).includes(endingNodeData.name) &&
-                !isUpsert
+                !Object.values(endingNodeData.outputs).includes(endingNodeData.name)
             ) {
                 return res
                     .status(500)
@@ -1584,17 +1687,9 @@ export class App {
                 chatflowid,
                 this.AppDataSource,
                 incomingInput?.overrideConfig,
-                this.cachePool,
-                isUpsert,
-                incomingInput.stopNodeId
+                this.cachePool
             )
 
-            // If request is upsert, stop here
-            if (isUpsert) {
-                this.chatflowPool.add(chatflowid, undefined, startingNodes, incomingInput?.overrideConfig)
-                return res.status(201).send('Successfully Upserted')
-            }
-
             const nodeToExecute =
                 endingNodeIds.length === 1
                     ? reactFlowNodes.find((node: IReactFlowNode) => endingNodeIds[0] === node.id)
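
Note: with this change, vector upserting no longer goes through buildChatflow; clients call the dedicated endpoints registered above (POST /api/v1/vector/upsert/:id, or /api/v1/vector/internal-upsert/:id from the UI). A minimal client-side sketch of the new flow, assuming a local server on port 3000, Node 18+ for global fetch, and placeholder chatflow ID, API key, and stop node ID (none of these values come from this commit):

// Hypothetical usage sketch (not part of this commit): upsert a chatflow's vector store
// through the new dedicated endpoint instead of the old prediction route.
const CHATFLOW_ID = '<chatflow-id>' // placeholder

async function upsertChatflowVectors(): Promise<void> {
    const response = await fetch(`http://localhost:3000/api/v1/vector/upsert/${CHATFLOW_ID}`, {
        method: 'POST',
        headers: {
            'Content-Type': 'application/json',
            // Only needed when the chatflow is protected by an API key (see validateKey above);
            // the exact header scheme is an assumption here.
            Authorization: 'Bearer <api-key>'
        },
        // stopNodeId is only required when the flow contains more than one vector store node.
        body: JSON.stringify({ stopNodeId: '<vector-store-node-id>' })
    })

    const text = await response.text()
    if (response.status === 201) {
        console.log(text) // 'Successfully Upserted'
    } else {
        console.error(`Upsert failed (${response.status}): ${text}`)
    }
}

upsertChatflowVectors().catch(console.error)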

View File

@@ -310,10 +310,8 @@ export const buildLangchain = async (
         if (overrideConfig) flowNodeData = replaceInputsWithConfig(flowNodeData, overrideConfig)
         const reactFlowNodeData: INodeData = resolveVariables(flowNodeData, flowNodes, question, chatHistory)
 
-        if (
-            isUpsert &&
-            ((stopNodeId && reactFlowNodeData.id === stopNodeId) || (!stopNodeId && reactFlowNodeData.category === 'Vector Stores'))
-        ) {
+        // TODO: Avoid processing Text Splitter + Doc Loader once Upsert & Load Existing Vector Nodes are deprecated
+        if (isUpsert && stopNodeId && nodeId === stopNodeId) {
             logger.debug(`[server]: Upserting ${reactFlowNode.data.label} (${reactFlowNode.data.id})`)
             await newNodeInstance.vectorStoreMethods!['upsert']!.call(newNodeInstance, reactFlowNodeData, {
                 chatId,
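
For orientation: the new upsertVector handler reverses the flow graph and keeps only the nodes reachable from the stop node, so buildLangchain (above) processes just the branch that feeds the vector store. A rough, self-contained sketch of that connected-nodes walk, with a simplified graph type and a hypothetical helper name standing in for the utils implementation:

// Illustrative sketch only; the real logic lives in constructGraphs / getAllConnectedNodes.
type DirectedGraph = Record<string, string[]>

// Breadth-first walk over a reversed graph: every node reachable from the stop node
// is an upstream dependency of the vector store (loaders, splitters, embeddings, ...).
function collectConnectedNodes(reversedGraph: DirectedGraph, stopNodeId: string): string[] {
    const visited = new Set<string>([stopNodeId])
    const queue = [stopNodeId]
    while (queue.length) {
        const current = queue.shift() as string
        for (const neighbour of reversedGraph[current] ?? []) {
            if (!visited.has(neighbour)) {
                visited.add(neighbour)
                queue.push(neighbour)
            }
        }
    }
    return [...visited]
}

// Example with made-up node IDs: a document branch feeding Redis, plus an unrelated LLM branch.
const reversed: DirectedGraph = {
    redis_0: ['splitter_0'],
    splitter_0: ['textFile_0'],
    textFile_0: [],
    conversationChain_0: ['chatOpenAI_0'],
    chatOpenAI_0: []
}
console.log(collectConnectedNodes(reversed, 'redis_0')) // [ 'redis_0', 'splitter_0', 'textFile_0' ]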

View File

@@ -1,6 +1,6 @@
 {
     "name": "flowise-ui",
-    "version": "1.4.4",
+    "version": "1.4.5",
     "license": "SEE LICENSE IN LICENSE.md",
     "homepage": "https://flowiseai.com",
     "author": {