Merge branch 'main' into feature/ChatGPT-Plugin

This commit is contained in:
Henry 2023-04-19 10:29:00 +01:00
commit 2a04c8baea
9 changed files with 367 additions and 4 deletions

View File

@ -0,0 +1,81 @@
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { SqlDatabaseChain } from 'langchain/chains'
import { getBaseClasses } from '../../../src/utils'
import { DataSource } from 'typeorm'
import { SqlDatabase } from 'langchain/sql_db'
import { BaseLLM } from 'langchain/llms/base'
class SqlDatabaseChain_Chains implements INode {
label: string
name: string
type: string
icon: string
category: string
baseClasses: string[]
description: string
inputs: INodeParams[]
constructor() {
this.label = 'Sql Database Chain'
this.name = 'sqlDatabaseChain'
this.type = 'SqlDatabaseChain'
this.icon = 'sqlchain.svg'
this.category = 'Chains'
this.description = 'Answer questions over a SQL database'
this.baseClasses = [this.type, ...getBaseClasses(SqlDatabaseChain)]
this.inputs = [
{
label: 'LLM',
name: 'llm',
type: 'BaseLLM'
},
{
label: 'Database',
name: 'database',
type: 'options',
options: [
{
label: 'SQlite',
name: 'sqlite'
}
],
default: 'sqlite'
},
{
label: 'Database File Path',
name: 'dbFilePath',
type: 'string',
placeholder: 'C:/Users/chinook.db'
}
]
}
async init(nodeData: INodeData): Promise<any> {
const databaseType = nodeData.inputs?.database
const llm = nodeData.inputs?.llm as BaseLLM
const dbFilePath = nodeData.inputs?.dbFilePath
const datasource = new DataSource({
type: databaseType,
database: dbFilePath
})
const db = await SqlDatabase.fromDataSourceParams({
appDataSource: datasource
})
const chain = new SqlDatabaseChain({
llm,
database: db
})
return chain
}
async run(nodeData: INodeData, input: string): Promise<string> {
const chain = nodeData.instance as SqlDatabaseChain
const res = await chain.run(input)
return res
}
}
module.exports = { nodeClass: SqlDatabaseChain_Chains }

View File

@ -0,0 +1,7 @@
<svg xmlns="http://www.w3.org/2000/svg" class="icon icon-tabler icon-tabler-sql" width="24" height="24" viewBox="0 0 24 24" stroke-width="2" stroke="currentColor" fill="none" stroke-linecap="round" stroke-linejoin="round">
<path stroke="none" d="M0 0h24v24H0z" fill="none"></path>
<path d="M12 8a2 2 0 0 1 2 2v4a2 2 0 1 1 -4 0v-4a2 2 0 0 1 2 -2z"></path>
<path d="M17 8v8h4"></path>
<path d="M13 15l1 1"></path>
<path d="M3 15a1 1 0 0 0 1 1h2a1 1 0 0 0 1 -1v-2a1 1 0 0 0 -1 -1h-2a1 1 0 0 1 -1 -1v-2a1 1 0 0 1 1 -1h2a1 1 0 0 1 1 1"></path>
</svg>

After

Width:  |  Height:  |  Size: 560 B

View File

@ -0,0 +1,64 @@
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { TextSplitter } from 'langchain/text_splitter'
import { CheerioWebBaseLoader } from 'langchain/document_loaders/web/cheerio'
/**
 * Flowise document-loader node that scrapes a webpage with Cheerio and
 * returns LangChain documents, optionally split by a TextSplitter.
 */
class Cheerio_DocumentLoaders implements INode {
    label: string
    name: string
    description: string
    type: string
    icon: string
    category: string
    baseClasses: string[]
    inputs: INodeParams[]

    constructor() {
        this.label = 'Cheerio Web Scraper'
        this.name = 'cheerioWebScraper'
        this.type = 'Document'
        this.icon = 'cheerio.svg'
        this.category = 'Document Loaders'
        this.description = `Load data from webpages`
        this.baseClasses = [this.type]
        this.inputs = [
            {
                label: 'URL',
                name: 'url',
                type: 'string'
            },
            {
                label: 'Text Splitter',
                name: 'textSplitter',
                type: 'TextSplitter',
                optional: true
            }
        ]
    }

    /**
     * Loads the page at the configured URL and returns its documents.
     * If a TextSplitter is connected, documents are split on load.
     *
     * @throws Error when the URL input is missing or fails validation —
     *         failing fast here replaces the previous behaviour of passing
     *         an empty string to CheerioWebBaseLoader, which produced an
     *         opaque error far from the actual cause.
     */
    async init(nodeData: INodeData): Promise<any> {
        const textSplitter = nodeData.inputs?.textSplitter as TextSplitter
        // Trim once up front; optional chaining also guards against a
        // missing URL input (previously `url.trim()` would throw TypeError).
        const url = (nodeData.inputs?.url as string)?.trim() ?? ''

        const urlPattern = new RegExp(
            '^(https?:\\/\\/)?' + // validate protocol
                '((([a-z\\d]([a-z\\d-]*[a-z\\d])*)\\.)+[a-z]{2,}|' + // validate domain name
                '((\\d{1,3}\\.){3}\\d{1,3}))' + // validate OR ip (v4) address
                '(\\:\\d+)?(\\/[-a-z\\d%_.~+]*)*' + // validate port and path
                '(\\?[;&a-z\\d%_.~+=-]*)?' + // validate query string
                '(\\#[-a-z\\d_]*)?$',
            'i'
        ) // validate fragment locator
        if (!urlPattern.test(url)) {
            throw new Error(`Invalid URL: "${url}"`)
        }

        const loader = new CheerioWebBaseLoader(url)
        if (textSplitter) {
            return await loader.loadAndSplit(textSplitter)
        }
        return await loader.load()
    }
}

module.exports = { nodeClass: Cheerio_DocumentLoaders }

View File

@ -0,0 +1,5 @@
<svg xmlns="http://www.w3.org/2000/svg" class="icon icon-tabler icon-tabler-circle-letter-c" width="24" height="24" viewBox="0 0 24 24" stroke-width="2" stroke="currentColor" fill="none" stroke-linecap="round" stroke-linejoin="round">
<path stroke="none" d="M0 0h24v24H0z" fill="none"></path>
<path d="M12 12m-9 0a9 9 0 1 0 18 0a9 9 0 1 0 -18 0"></path>
<path d="M14 10a2 2 0 1 0 -4 0v4a2 2 0 1 0 4 0"></path>
</svg>

After

Width:  |  Height:  |  Size: 425 B

View File

@ -0,0 +1,40 @@
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses } from '../../../src/utils'
import { CohereEmbeddings } from 'langchain/embeddings/cohere'
/**
 * Flowise embeddings node backed by the Cohere API.
 *
 * Takes a Cohere API key and yields a LangChain CohereEmbeddings
 * instance for generating text embeddings.
 */
class CohereEmbedding_Embeddings implements INode {
    label: string
    name: string
    type: string
    icon: string
    category: string
    description: string
    baseClasses: string[]
    inputs: INodeParams[]

    constructor() {
        this.label = 'Cohere Embeddings'
        this.name = 'cohereEmbeddings'
        this.type = 'CohereEmbeddings'
        this.icon = 'cohere.png'
        this.category = 'Embeddings'
        this.description = 'Cohere API to generate embeddings for a given text'
        this.baseClasses = [this.type, ...getBaseClasses(CohereEmbeddings)]
        this.inputs = [
            {
                label: 'Cohere API Key',
                name: 'cohereApiKey',
                type: 'password'
            }
        ]
    }

    /** Instantiates a CohereEmbeddings model using the configured API key. */
    async init(nodeData: INodeData): Promise<any> {
        const cohereApiKey = nodeData.inputs?.cohereApiKey as string
        const embeddings = new CohereEmbeddings({ apiKey: cohereApiKey })
        return embeddings
    }
}

module.exports = { nodeClass: CohereEmbedding_Embeddings }

Binary file not shown.

After

Width:  |  Height:  |  Size: 7.7 KiB

View File

@ -20,7 +20,9 @@
"@huggingface/inference": "^1.6.3",
"@pinecone-database/pinecone": "^0.0.12",
"axios": "^0.27.2",
"cheerio": "^1.0.0-rc.12",
"chromadb": "^1.3.1",
"cohere-ai": "^6.2.0",
"d3-dsv": "2",
"dotenv": "^16.0.0",
"express": "^4.17.3",

View File

@ -0,0 +1,163 @@
{
"description": "Answer questions over a SQL database",
"nodes": [
{
"width": 300,
"height": 424,
"id": "sqlDatabaseChain_0",
"position": {
"x": 1271.2742585099204,
"y": 232.91561199714107
},
"type": "customNode",
"data": {
"id": "sqlDatabaseChain_0",
"label": "Sql Database Chain",
"name": "sqlDatabaseChain",
"type": "SqlDatabaseChain",
"baseClasses": ["SqlDatabaseChain", "BaseChain"],
"category": "Chains",
"description": "Answer questions over a SQL database",
"inputParams": [
{
"label": "Database",
"name": "database",
"type": "options",
"options": [
{
"label": "SQlite",
"name": "sqlite"
}
],
"default": "sqlite"
},
{
"label": "Database File Path",
"name": "dbFilePath",
"type": "string",
"placeholder": "C:/Users/chinook.db"
}
],
"inputAnchors": [
{
"label": "LLM",
"name": "llm",
"type": "BaseLLM",
"id": "sqlDatabaseChain_0-input-llm-BaseLLM"
}
],
"inputs": {
"llm": "{{openAI_0.data.instance}}",
"database": "sqlite",
"dbFilePath": ""
},
"outputAnchors": [
{
"id": "sqlDatabaseChain_0-output-sqlDatabaseChain-SqlDatabaseChain|BaseChain",
"name": "sqlDatabaseChain",
"label": "SqlDatabaseChain",
"type": "SqlDatabaseChain | BaseChain"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 1271.2742585099204,
"y": 232.91561199714107
},
"dragging": false
},
{
"width": 300,
"height": 472,
"id": "openAI_0",
"position": {
"x": 867.8574087065126,
"y": 209.58625096303308
},
"type": "customNode",
"data": {
"id": "openAI_0",
"label": "OpenAI",
"name": "openAI",
"type": "OpenAI",
"baseClasses": ["OpenAI", "BaseLLM", "BaseLanguageModel"],
"category": "LLMs",
"description": "Wrapper around OpenAI large language models",
"inputParams": [
{
"label": "OpenAI Api Key",
"name": "openAIApiKey",
"type": "password"
},
{
"label": "Model Name",
"name": "modelName",
"type": "options",
"options": [
{
"label": "text-davinci-003",
"name": "text-davinci-003"
},
{
"label": "text-davinci-002",
"name": "text-davinci-002"
},
{
"label": "text-curie-001",
"name": "text-curie-001"
},
{
"label": "text-babbage-001",
"name": "text-babbage-001"
}
],
"default": "text-davinci-003",
"optional": true
},
{
"label": "Temperature",
"name": "temperature",
"type": "number",
"default": 0.7,
"optional": true
}
],
"inputAnchors": [],
"inputs": {
"modelName": "text-davinci-003",
"temperature": "0"
},
"outputAnchors": [
{
"id": "openAI_0-output-openAI-OpenAI|BaseLLM|BaseLanguageModel",
"name": "openAI",
"label": "OpenAI",
"type": "OpenAI | BaseLLM | BaseLanguageModel"
}
],
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 867.8574087065126,
"y": 209.58625096303308
},
"dragging": false
}
],
"edges": [
{
"source": "openAI_0",
"sourceHandle": "openAI_0-output-openAI-OpenAI|BaseLLM|BaseLanguageModel",
"target": "sqlDatabaseChain_0",
"targetHandle": "sqlDatabaseChain_0-input-llm-BaseLLM",
"type": "buttonedge",
"id": "openAI_0-openAI_0-output-openAI-OpenAI|BaseLLM|BaseLanguageModel-sqlDatabaseChain_0-sqlDatabaseChain_0-input-llm-BaseLLM",
"data": {
"label": ""
}
}
]
}

View File

@ -39,6 +39,7 @@ export const ChatMessage = ({ chatflowid }) => {
const customization = useSelector((state) => state.customization)
const { confirm } = useConfirm()
const dispatch = useDispatch()
const ps = useRef()
useNotifier()
const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args))
@ -54,7 +55,6 @@ export const ChatMessage = ({ chatflowid }) => {
}
])
const messagesEndRef = useRef(null)
const inputRef = useRef(null)
const anchorRef = useRef(null)
const prevOpen = useRef(open)
@ -115,7 +115,9 @@ export const ChatMessage = ({ chatflowid }) => {
}
const scrollToBottom = () => {
messagesEndRef.current?.scrollIntoView({ behavior: 'smooth' })
if (ps.current) {
ps.current.scrollTo({ top: Number.MAX_SAFE_INTEGER, behavior: 'smooth' })
}
}
const addChatMessage = async (message, type) => {
@ -286,7 +288,7 @@ export const ChatMessage = ({ chatflowid }) => {
<ClickAwayListener onClickAway={handleClose}>
<MainCard border={false} elevation={16} content={false} boxShadow shadow={theme.shadows[16]}>
<div className='cloud'>
<div className='messagelist'>
<div ref={ps} className='messagelist'>
{messages.map((message, index) => {
return (
// The latest message sent by the user will be animated while waiting for a response
@ -331,7 +333,6 @@ export const ChatMessage = ({ chatflowid }) => {
</Box>
)
})}
<div ref={messagesEndRef} />
</div>
</div>
<Divider />