add mistral

Henry 2023-12-15 18:59:11 +00:00
parent 0ebfcccece
commit 05db533396
9 changed files with 276 additions and 3 deletions

View File

@@ -11,7 +11,8 @@ class GoogleGenerativeAICredential implements INodeCredential {
this.label = 'Google Generative AI'
this.name = 'googleGenerativeAI'
this.version = 1.0
this.description = 'Get your <a target="_blank" href="https://ai.google.dev/tutorials/setup">API Key</a> here.'
this.description =
'You can get your API key from the official <a target="_blank" href="https://ai.google.dev/tutorials/setup">page</a>.'
this.inputs = [
{
label: 'Google AI API Key',

View File

@@ -0,0 +1,25 @@
import { INodeParams, INodeCredential } from '../src/Interface'
class MistralAICredential implements INodeCredential {
label: string
name: string
version: number
description: string
inputs: INodeParams[]
constructor() {
this.label = 'MistralAI API'
this.name = 'mistralAIApi'
this.version = 1.0
this.description = 'You can get your API key from the official <a target="_blank" href="https://console.mistral.ai/">console</a>.'
this.inputs = [
{
label: 'MistralAI API Key',
name: 'mistralAIAPIKey',
type: 'password'
}
]
}
}
module.exports = { credClass: MistralAICredential }
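For context, this credential is consumed by the new nodes below: a node lists credentialNames: ['mistralAIApi'] and then reads the stored field back by its input name. A minimal sketch, using the same helpers the nodes in this commit use:

// Inside a node's init() — mirrors the ChatMistralAI node further down
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
const apiKey = getCredentialParam('mistralAIAPIKey', credentialData, nodeData)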

View File

@@ -49,8 +49,7 @@ class GoogleGenerativeAI_ChatModels implements INode {
name: 'gemini-pro'
}
],
default: 'gemini-pro',
optional: true
default: 'gemini-pro'
},
{
label: 'Temperature',

View File

@@ -0,0 +1,151 @@
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { BaseCache } from 'langchain/schema'
import { ChatMistralAI, ChatMistralAIInput } from '@langchain/mistralai'
class ChatMistral_ChatModels implements INode {
label: string
name: string
version: number
type: string
icon: string
category: string
description: string
baseClasses: string[]
credential: INodeParams
inputs: INodeParams[]
constructor() {
this.label = 'ChatMistralAI'
this.name = 'chatMistralAI'
this.version = 1.0
this.type = 'ChatMistralAI'
this.icon = 'mistralai.png'
this.category = 'Chat Models'
this.description = 'Wrapper around Mistral large language models that use the Chat endpoint'
this.baseClasses = [this.type, ...getBaseClasses(ChatMistralAI)]
this.credential = {
label: 'Connect Credential',
name: 'credential',
type: 'credential',
credentialNames: ['mistralAIApi']
}
this.inputs = [
{
label: 'Cache',
name: 'cache',
type: 'BaseCache',
optional: true
},
{
label: 'Model Name',
name: 'modelName',
type: 'options',
options: [
{
label: 'mistral-tiny',
name: 'mistral-tiny'
},
{
label: 'mistral-small',
name: 'mistral-small'
},
{
label: 'mistral-medium',
name: 'mistral-medium'
}
],
default: 'mistral-tiny'
},
{
label: 'Temperature',
name: 'temperature',
type: 'number',
description:
'What sampling temperature to use, between 0.0 and 1.0. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic.',
step: 0.1,
default: 0.9,
optional: true
},
{
label: 'Max Output Tokens',
name: 'maxOutputTokens',
type: 'number',
description: 'The maximum number of tokens to generate in the completion.',
step: 1,
optional: true,
additionalParams: true
},
{
label: 'Top Probability',
name: 'topP',
type: 'number',
description:
'Nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered.',
step: 0.1,
optional: true,
additionalParams: true
},
{
label: 'Random Seed',
name: 'randomSeed',
type: 'number',
description: 'The seed to use for random sampling. If set, different calls will generate deterministic results.',
step: 1,
optional: true,
additionalParams: true
},
{
label: 'Safe Mode',
name: 'safeMode',
type: 'boolean',
description: 'Whether to inject a safety prompt before all conversations.',
optional: true,
additionalParams: true
},
{
label: 'Override Endpoint',
name: 'overrideEndpoint',
type: 'string',
optional: true,
additionalParams: true
}
]
}
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
const apiKey = getCredentialParam('mistralAIAPIKey', credentialData, nodeData)
const temperature = nodeData.inputs?.temperature as string
const modelName = nodeData.inputs?.modelName as string
const maxOutputTokens = nodeData.inputs?.maxOutputTokens as string
const topP = nodeData.inputs?.topP as string
const safeMode = nodeData.inputs?.safeMode as boolean
const randomSeed = nodeData.inputs?.randomSeed as string
const overrideEndpoint = nodeData.inputs?.overrideEndpoint as string
// Waiting for a fix from LangChain to enable streaming
const streaming = nodeData.inputs?.streaming as boolean
const cache = nodeData.inputs?.cache as BaseCache
const obj: ChatMistralAIInput = {
apiKey: apiKey,
modelName: modelName
}
if (maxOutputTokens) obj.maxTokens = parseInt(maxOutputTokens, 10)
if (topP) obj.topP = parseFloat(topP)
if (cache) obj.cache = cache
if (temperature) obj.temperature = parseFloat(temperature)
if (randomSeed) obj.randomSeed = parseInt(randomSeed, 10)
if (safeMode) obj.safeMode = safeMode
if (overrideEndpoint) obj.endpoint = overrideEndpoint
const model = new ChatMistralAI(obj)
return model
}
}
module.exports = { nodeClass: ChatMistral_ChatModels }
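As a sanity check on the wrapper above, here is a minimal standalone sketch of @langchain/mistralai with the same options the node exposes. The environment variable and prompt text are placeholder assumptions, not part of this commit:

import { ChatMistralAI } from '@langchain/mistralai'
import { HumanMessage } from 'langchain/schema'

async function main() {
    // Assumes MISTRAL_API_KEY is set in the environment
    const model = new ChatMistralAI({
        apiKey: process.env.MISTRAL_API_KEY ?? '',
        modelName: 'mistral-tiny',
        temperature: 0.9
    })
    const response = await model.invoke([new HumanMessage('Hello, Mistral!')])
    console.log(response.content)
}
main()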

Binary file not shown.


View File

@@ -0,0 +1,95 @@
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { MistralAIEmbeddings, MistralAIEmbeddingsParams } from '@langchain/mistralai'
class MistralEmbedding_Embeddings implements INode {
label: string
name: string
version: number
type: string
icon: string
category: string
description: string
baseClasses: string[]
inputs: INodeParams[]
credential: INodeParams
constructor() {
this.label = 'MistralAI Embeddings'
this.name = 'mistralAIEmbeddings'
this.version = 1.0
this.type = 'MistralAIEmbeddings'
this.icon = 'mistralai.png'
this.category = 'Embeddings'
this.description = 'MistralAI API to generate embeddings for a given text'
this.baseClasses = [this.type, ...getBaseClasses(MistralAIEmbeddings)]
this.credential = {
label: 'Connect Credential',
name: 'credential',
type: 'credential',
credentialNames: ['mistralAIApi']
}
this.inputs = [
{
label: 'Model Name',
name: 'modelName',
type: 'options',
options: [
{
label: 'mistral-embed',
name: 'mistral-embed'
}
],
default: 'mistral-embed'
},
{
label: 'Batch Size',
name: 'batchSize',
type: 'number',
step: 1,
default: 512,
optional: true,
additionalParams: true
},
{
label: 'Strip New Lines',
name: 'stripNewLines',
type: 'boolean',
default: true,
optional: true,
additionalParams: true
},
{
label: 'Override Endpoint',
name: 'overrideEndpoint',
type: 'string',
optional: true,
additionalParams: true
}
]
}
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
const modelName = nodeData.inputs?.modelName as string
const batchSize = nodeData.inputs?.batchSize as string
const stripNewLines = nodeData.inputs?.stripNewLines as boolean
const overrideEndpoint = nodeData.inputs?.overrideEndpoint as string
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
const apiKey = getCredentialParam('mistralAIAPIKey', credentialData, nodeData)
const obj: MistralAIEmbeddingsParams = {
apiKey: apiKey,
modelName: modelName
}
if (batchSize) obj.batchSize = parseInt(batchSize, 10)
if (stripNewLines !== undefined) obj.stripNewLines = stripNewLines
if (overrideEndpoint) obj.endpoint = overrideEndpoint
const model = new MistralAIEmbeddings(obj)
return model
}
}
module.exports = { nodeClass: MistralEmbedding_Embeddings }
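Similarly, a minimal standalone sketch of the embeddings wrapper, with placeholder input text and the API key assumed to be in the environment:

import { MistralAIEmbeddings } from '@langchain/mistralai'

async function main() {
    const embeddings = new MistralAIEmbeddings({
        apiKey: process.env.MISTRAL_API_KEY ?? '',
        modelName: 'mistral-embed'
    })
    // embedQuery returns one vector; embedDocuments returns one vector per input string
    const vector = await embeddings.embedQuery('What does Flowise do?')
    console.log(vector.length)
}
main()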

Binary file not shown.


View File

@@ -27,6 +27,7 @@
"@google-ai/generativelanguage": "^0.2.1",
"@huggingface/inference": "^2.6.1",
"@langchain/google-genai": "^0.0.3",
"@langchain/mistralai": "^0.0.2",
"@notionhq/client": "^2.2.8",
"@opensearch-project/opensearch": "^1.2.0",
"@pinecone-database/pinecone": "^1.1.1",

View File

@@ -711,6 +711,7 @@ export const findAvailableConfigs = (reactFlowNodes: IReactFlowNode[], component
/**
* Check to see if flow valid for stream
* TODO: perform check from component level. i.e: set streaming on component, and check here
* @param {IReactFlowNode[]} reactFlowNodes
* @param {INodeData} endingNodeData
* @returns {boolean}