Merge branch 'main' into CHORE/Upgrade-Analytic-Dependencies
This commit is contained in:
commit c0d311c12c

@@ -1,6 +1,6 @@
 {
     "name": "flowise",
-    "version": "1.4.7",
+    "version": "1.4.9",
     "private": true,
     "homepage": "https://flowiseai.com",
     "workspaces": [

@@ -1,7 +1,6 @@
-import { OpenAIBaseInput } from 'langchain/dist/types/openai-types'
 import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
 import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
-import { AzureOpenAIInput, ChatOpenAI } from 'langchain/chat_models/openai'
+import { AzureOpenAIInput, ChatOpenAI, OpenAIChatInput } from 'langchain/chat_models/openai'
 import { BaseCache } from 'langchain/schema'
 import { BaseLLMParams } from 'langchain/llms/base'

@@ -123,7 +122,7 @@ class AzureChatOpenAI_ChatModels implements INode {
         const azureOpenAIApiDeploymentName = getCredentialParam('azureOpenAIApiDeploymentName', credentialData, nodeData)
         const azureOpenAIApiVersion = getCredentialParam('azureOpenAIApiVersion', credentialData, nodeData)

-        const obj: Partial<AzureOpenAIInput> & BaseLLMParams & Partial<OpenAIBaseInput> = {
+        const obj: Partial<AzureOpenAIInput> & BaseLLMParams & Partial<OpenAIChatInput> = {
             temperature: parseFloat(temperature),
             modelName,
             azureOpenAIApiKey,

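Note on the type swap: OpenAIChatInput is the chat-specific input type that langchain ^0.0.213 exports from 'langchain/chat_models/openai', replacing the internal 'langchain/dist/types/openai-types' path dropped above. A minimal sketch of how the widened config type is consumed; the literal values are placeholders, not from this commit:

import { AzureOpenAIInput, ChatOpenAI, OpenAIChatInput } from 'langchain/chat_models/openai'
import { BaseLLMParams } from 'langchain/llms/base'

// Same intersection type as the hunk above; chat-only fields such as
// modelName now type-check without reaching into dist/ internals.
const obj: Partial<AzureOpenAIInput> & BaseLLMParams & Partial<OpenAIChatInput> = {
    temperature: 0.9,
    modelName: 'gpt-35-turbo',
    azureOpenAIApiKey: process.env.AZURE_OPENAI_API_KEY
}
const model = new ChatOpenAI(obj)
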
@@ -124,13 +124,13 @@ class ChatMistral_ChatModels implements INode {
         const safeMode = nodeData.inputs?.safeMode as boolean
         const randomSeed = nodeData.inputs?.safeMode as string
         const overrideEndpoint = nodeData.inputs?.overrideEndpoint as string
-        // Waiting fix from langchain + mistral to enable streaming - https://github.com/mistralai/client-js/issues/18
-
+        const streaming = nodeData.inputs?.streaming as boolean
         const cache = nodeData.inputs?.cache as BaseCache

         const obj: ChatMistralAIInput = {
             apiKey: apiKey,
-            modelName: modelName
+            modelName: modelName,
+            streaming: streaming ?? true
         }

         if (maxOutputTokens) obj.maxTokens = parseInt(maxOutputTokens, 10)

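The `streaming ?? true` default is worth spelling out: streaming stays enabled unless the node input explicitly sets it to false, because `??` only falls through on null or undefined. A sketch assuming @langchain/mistralai ^0.0.6 as pinned later in this commit:

import { ChatMistralAI, ChatMistralAIInput } from '@langchain/mistralai'

// Input not wired up in the flow -> undefined -> defaults to true.
const streaming = undefined as boolean | undefined

const obj: ChatMistralAIInput = {
    apiKey: process.env.MISTRAL_API_KEY,
    modelName: 'mistral-tiny',
    streaming: streaming ?? true // true; only an explicit false disables it
}
const model = new ChatMistralAI(obj)
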
@@ -1,8 +1,7 @@
 import { INode, INodeData, INodeParams } from '../../../src/Interface'
 import { getBaseClasses } from '../../../src/utils'
-import { ChatOllama } from 'langchain/chat_models/ollama'
+import { ChatOllama, ChatOllamaInput } from 'langchain/chat_models/ollama'
 import { BaseCache } from 'langchain/schema'
-import { OllamaInput } from 'langchain/dist/util/ollama'
 import { BaseLLMParams } from 'langchain/llms/base'

 class ChatOllama_ChatModels implements INode {

@@ -209,7 +208,7 @@ class ChatOllama_ChatModels implements INode {

         const cache = nodeData.inputs?.cache as BaseCache

-        const obj: OllamaInput & BaseLLMParams = {
+        const obj: ChatOllamaInput & BaseLLMParams = {
             baseUrl,
             temperature: parseFloat(temperature),
             model: modelName

@@ -66,6 +66,10 @@ class NotionDB_DocumentLoaders implements INode {
                 auth: notionIntegrationToken
             },
             id: databaseId,
+            callerOptions: {
+                maxConcurrency: 64 // Default value
+            },
+            propertiesAsHeader: true, // Prepends a front matter header of the page properties to the page contents
             type: 'database'
         }
         const loader = new NotionAPILoader(obj)

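For context, the two new options: callerOptions.maxConcurrency caps parallel Notion API calls, and propertiesAsHeader prepends each page's properties as a front-matter block on the loaded document. A standalone sketch; the 'langchain/document_loaders/web/notionapi' import path, the clientOptions key, and the env var names are assumptions, since only the options object appears in this commit:

import { NotionAPILoader } from 'langchain/document_loaders/web/notionapi'

async function loadNotionDatabase() {
    const loader = new NotionAPILoader({
        clientOptions: { auth: process.env.NOTION_INTEGRATION_TOKEN },
        id: process.env.NOTION_DATABASE_ID as string,
        callerOptions: { maxConcurrency: 64 }, // cap concurrent Notion API requests
        propertiesAsHeader: true, // front-matter header with page properties
        type: 'database'
    })
    return loader.load()
}
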
@@ -1,7 +1,7 @@
 import { INode, INodeData, INodeParams } from '../../../src/Interface'
 import { getBaseClasses } from '../../../src/utils'
+import { OllamaInput } from 'langchain/llms/ollama'
 import { OllamaEmbeddings } from 'langchain/embeddings/ollama'
-import { OllamaInput } from 'langchain/dist/util/ollama'

 class OllamaEmbedding_Embeddings implements INode {
     label: string

@@ -1,8 +1,7 @@
 import { INode, INodeData, INodeParams } from '../../../src/Interface'
 import { getBaseClasses } from '../../../src/utils'
-import { Ollama } from 'langchain/llms/ollama'
+import { Ollama, OllamaInput } from 'langchain/llms/ollama'
 import { BaseCache } from 'langchain/schema'
-import { OllamaInput } from 'langchain/dist/util/ollama'
 import { BaseLLMParams } from 'langchain/llms/base'

 class Ollama_LLMs implements INode {

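Both Ollama files follow the same cleanup as ChatOllama above: OllamaInput moves off the internal 'langchain/dist/util/ollama' path onto the public 'langchain/llms/ollama' entry point, which is stable across releases. A minimal sketch of the resulting usage, with placeholder values:

import { Ollama, OllamaInput } from 'langchain/llms/ollama'
import { BaseLLMParams } from 'langchain/llms/base'

// Same shape as the obj built in the LLM node's init.
const obj: OllamaInput & BaseLLMParams = {
    baseUrl: 'http://localhost:11434',
    temperature: 0.7,
    model: 'llama2'
}
const llm = new Ollama(obj)
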
@@ -1,6 +1,6 @@
 {
     "name": "flowise-components",
-    "version": "1.4.9",
+    "version": "1.5.0",
     "description": "Flowiseai Components",
     "main": "dist/src/index",
     "types": "dist/src/index.d.ts",

@@ -26,8 +26,8 @@
         "@gomomento/sdk-core": "^1.51.1",
         "@google-ai/generativelanguage": "^0.2.1",
         "@huggingface/inference": "^2.6.1",
-        "@langchain/google-genai": "^0.0.3",
-        "@langchain/mistralai": "^0.0.3",
+        "@langchain/google-genai": "^0.0.6",
+        "@langchain/mistralai": "^0.0.6",
         "@notionhq/client": "^2.2.8",
         "@opensearch-project/opensearch": "^1.2.0",
         "@pinecone-database/pinecone": "^1.1.1",

@@ -52,7 +52,7 @@
         "html-to-text": "^9.0.5",
         "husky": "^8.0.3",
         "ioredis": "^5.3.2",
-        "langchain": "^0.0.196",
+        "langchain": "^0.0.213",
         "langfuse": "2.0.2",
         "langfuse-langchain": "2.0.2",
         "langsmith": "0.0.53",

@@ -1,6 +1,6 @@
 {
     "name": "flowise",
-    "version": "1.4.7",
+    "version": "1.4.9",
     "description": "Flowiseai Server",
     "main": "dist/index",
     "types": "dist/index.d.ts",

@@ -818,7 +818,7 @@ export const findAvailableConfigs = (reactFlowNodes: IReactFlowNode[], component
  */
 export const isFlowValidForStream = (reactFlowNodes: IReactFlowNode[], endingNodeData: INodeData) => {
     const streamAvailableLLMs = {
-        'Chat Models': ['azureChatOpenAI', 'chatOpenAI', 'chatAnthropic', 'chatOllama', 'awsChatBedrock'],
+        'Chat Models': ['azureChatOpenAI', 'chatOpenAI', 'chatAnthropic', 'chatOllama', 'awsChatBedrock', 'chatMistralAI'],
         LLMs: ['azureOpenAI', 'openAI', 'ollama']
     }

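The streaming gate is a plain category-to-node-name map; a self-contained sketch of the membership check (the canStream helper below is illustrative, not Flowise code):

const streamAvailableLLMs: Record<string, string[]> = {
    'Chat Models': ['azureChatOpenAI', 'chatOpenAI', 'chatAnthropic', 'chatOllama', 'awsChatBedrock', 'chatMistralAI'],
    LLMs: ['azureOpenAI', 'openAI', 'ollama']
}

// A flow may stream only if its ending node's name is listed under its category.
const canStream = (category: string, name: string): boolean =>
    streamAvailableLLMs[category]?.includes(name) ?? false

console.log(canStream('Chat Models', 'chatMistralAI')) // true after this commit
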
@@ -875,7 +875,9 @@ export const getEncryptionKey = async (): Promise<string> => {
         return await fs.promises.readFile(getEncryptionKeyPath(), 'utf8')
     } catch (error) {
         const encryptKey = generateEncryptKey()
-        const defaultLocation = path.join(getUserHome(), '.flowise', 'encryption.key')
+        const defaultLocation = process.env.SECRETKEY_PATH
+            ? path.join(process.env.SECRETKEY_PATH, 'encryption.key')
+            : path.join(getUserHome(), '.flowise', 'encryption.key')
         await fs.promises.writeFile(defaultLocation, encryptKey)
         return encryptKey
     }

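The new fallback order: SECRETKEY_PATH, when set, decides where encryption.key is written; otherwise it lands in ~/.flowise as before. A sketch of just the path resolution, with os.homedir() standing in for Flowise's getUserHome helper:

import path from 'path'
import os from 'os'

// SECRETKEY_PATH wins when present; otherwise fall back to ~/.flowise.
const keyPath = process.env.SECRETKEY_PATH
    ? path.join(process.env.SECRETKEY_PATH, 'encryption.key')
    : path.join(os.homedir(), '.flowise', 'encryption.key')

// e.g. SECRETKEY_PATH=/run/secrets/flowise -> /run/secrets/flowise/encryption.key
console.log(keyPath)
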
@@ -1,6 +1,6 @@
 {
     "name": "flowise-ui",
-    "version": "1.4.5",
+    "version": "1.4.6",
     "license": "SEE LICENSE IN LICENSE.md",
     "homepage": "https://flowiseai.com",
     "author": {