Compare commits

...

1 Commit

Author   SHA1         Message              Date
Henry    8d03f979ae   remove teradatasql   2025-11-06 22:18:51 +00:00
4 changed files with 0 additions and 854 deletions

View File

@@ -1,816 +0,0 @@
import { Document } from '@langchain/core/documents'
import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams, IndexingResult } from '../../../src/Interface'
import { Embeddings } from '@langchain/core/embeddings'
import * as teradatasql from 'teradatasql'
import { getCredentialData, getCredentialParam } from '../../../src/utils'
class Teradata_VectorStores implements INode {
label: string
name: string
version: number
description: string
type: string
icon: string
category: string
badge: string
baseClasses: string[]
credential: INodeParams
inputs: INodeParams[]
outputs: INodeOutputsValue[]
constructor() {
this.label = 'Teradata'
this.name = 'teradata'
this.version = 1.0
this.type = 'teradata'
this.icon = 'teradata.svg'
this.category = 'Vector Stores'
this.description = 'Upsert embedded data and perform similarity search upon query using Teradata Enterprise Vector Store'
this.baseClasses = [this.type, 'BaseRetriever']
this.credential = {
label: 'Connect Credential',
name: 'credential',
type: 'credential',
credentialNames: ['teradataVectorStoreApiCredentials']
}
this.inputs = [
{
label: 'Document',
name: 'document',
type: 'Document',
list: true
},
{
label: 'Embeddings',
name: 'embeddings',
type: 'Embeddings'
},
{
label: 'Vector_Store_Name',
name: 'vectorStoreName',
description: 'Teradata Vector Store Name',
placeholder: `Vector_Store_Name`,
type: 'string'
},
{
label: 'Database',
name: 'database',
description: 'Database for Teradata Vector Store',
placeholder: 'Database',
type: 'string'
},
{
label: 'Embeddings_Table_Name',
name: 'embeddingsTableName',
description: 'Table name for storing embeddings',
placeholder: 'Embeddings_Table_Name',
type: 'string'
},
{
label: 'Vector_Store_Description',
name: 'vectorStoreDescription',
description: 'Teradata Vector Store Description',
placeholder: `Vector_Store_Description`,
type: 'string',
additionalParams: true,
optional: true
},
{
label: 'Search_Algorithm',
name: 'searchAlgorithm',
description: 'Search Algorithm for Vector Store',
placeholder: 'Search_Algorithm',
type: 'string',
additionalParams: true,
optional: true
},
{
label: 'Distance_Metric',
name: 'distanceMetric',
description: 'Distance Metric to be used for distance calculation between vectors',
placeholder: 'Distance_Metric',
type: 'string',
additionalParams: true,
optional: true
},
{
label: 'Initial_Centroids_Method',
name: 'initialCentroidsMethod',
description: 'Algorithm to be used for initializing the cluster centroids for Search Algorithm KMEANS',
placeholder: 'Initial_Centroids_Method',
type: 'string',
additionalParams: true,
optional: true
},
{
label: 'Train_NumCluster',
name: 'trainNumCluster',
description: 'Number of clusters to be trained for Search Algorithm KMEANS',
placeholder: 'Train_NumCluster',
type: 'string',
additionalParams: true,
optional: true
},
{
label: 'MaxIterNum',
name: 'maxIterNum',
description: 'Maximum number of iterations to be run during training for Search Algorithm KMEANS',
placeholder: 'MaxIterNum',
type: 'string',
additionalParams: true,
optional: true
},
{
label: 'Stop_Threshold',
name: 'stopThreshold',
description: 'Threshold value at which training should be stopped for Search Algorithm KMEANS',
placeholder: 'Stop_Threshold',
type: 'string',
additionalParams: true,
optional: true
},
{
label: 'Seed',
name: 'seed',
description: 'Seed value to be used for random number generation for Search Algorithm KMEANS',
placeholder: 'Seed',
type: 'string',
additionalParams: true,
optional: true
},
{
label: 'Num_Init',
name: 'numInit',
description:
'Number of times the k-means algorithm should run with different initial centroid seeds for Search Algorithm KMEANS',
placeholder: 'Num_Init',
type: 'string',
additionalParams: true,
optional: true
},
{
label: 'Top_K',
name: 'topK',
description: 'Number of top results to fetch. Defaults to 10',
placeholder: 'Top_K',
type: 'string',
additionalParams: true,
optional: true
},
{
label: 'Search_Threshold',
name: 'searchThreshold',
description: 'Threshold value to consider for matching tables/views while searching',
placeholder: 'Search_Threshold',
type: 'string',
additionalParams: true,
optional: true
},
{
label: 'Search_NumCluster',
name: 'searchNumCluster',
description: 'Number of clusters to be considered while searching for Search Algorithm KMEANS',
placeholder: 'Search_NumCluster',
type: 'string',
additionalParams: true,
optional: true
},
{
label: 'Ef_Search',
name: 'efSearch',
description: 'Number of neighbors to be considered during search in HNSW graph for Search Algorithm HNSW',
placeholder: 'Ef_Search',
type: 'string',
additionalParams: true,
optional: true
},
{
label: 'Num_Layer',
name: 'numLayer',
description: 'Number of layers in the HNSW graph for Search Algorithm HNSW',
placeholder: 'Num_Layer',
type: 'string',
additionalParams: true,
optional: true
},
{
label: 'Ef_Construction',
name: 'efConstruction',
description: 'Number of neighbors to be considered during construction of the HNSW graph for Search Algorithm HNSW',
placeholder: 'Ef_Construction',
type: 'string',
additionalParams: true,
optional: true
},
{
label: 'Num_ConnPerNode',
name: 'numConnPerNode',
description: 'Number of connections per node in the HNSW graph during construction for Search Algorithm HNSW',
placeholder: 'Num_ConnPerNode',
type: 'string',
additionalParams: true,
optional: true
},
{
label: 'MaxNum_ConnPerNode',
name: 'maxNumConnPerNode',
description: 'Maximum number of connections per node in the HNSW graph during construction for Search Algorithm HNSW',
placeholder: 'MaxNum_ConnPerNode',
type: 'string',
additionalParams: true,
optional: true
},
{
label: 'Apply_Heuristics',
name: 'applyHeuristics',
description:
'Specifies whether to apply heuristics optimizations during construction of the HNSW graph for Search Algorithm HNSW',
placeholder: 'Apply_Heuristics',
type: 'string',
additionalParams: true,
optional: true
},
{
label: 'Rerank_Weight',
name: 'rerankWeight',
description: 'Weight to be used for reranking the search results',
placeholder: 'Rerank_Weight',
type: 'string',
additionalParams: true,
optional: true
},
{
label: 'Relevance_Top_K',
name: 'relevanceTopK',
description: 'Number of top similarity matches to be considered for reranking',
placeholder: 'Relevance_Top_K',
type: 'string',
additionalParams: true,
optional: true
},
{
label: 'Relevance_Search_Threshold',
name: 'relevanceSearchThreshold',
description: 'Threshold value to consider for matching tables/views while reranking',
placeholder: 'Relevance_Search_Threshold',
type: 'string',
additionalParams: true,
optional: true
}
]
this.outputs = [
{
label: 'Teradata Retriever',
name: 'retriever',
baseClasses: this.baseClasses
},
{
label: 'Teradata Vector Store',
name: 'vectorStore',
baseClasses: [this.type, ...this.baseClasses]
}
]
}
// @ts-ignore
vectorStoreMethods = {
async upsert(nodeData: INodeData, options: ICommonObject): Promise<Partial<IndexingResult>> {
const docs = nodeData.inputs?.document as Document[]
const embeddings = nodeData.inputs?.embeddings as Embeddings
const embeddingsTableName = nodeData.inputs?.embeddingsTableName as string
const vectorStoreName = nodeData.inputs?.vectorStoreName as string
const database = nodeData.inputs?.database as string
const vectorStoreDescription = (nodeData.inputs?.vectorStoreDescription as string) || null
const searchAlgorithm = (nodeData.inputs?.searchAlgorithm as string) || null
const distanceMetric = (nodeData.inputs?.distanceMetric as string) || null
const initialCentroidsMethod = (nodeData.inputs?.initialCentroidsMethod as string) || null
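// Note: the `parseInt(...) || null` pattern below maps both NaN and 0 to null,
// so a literal 0 input falls back to the server-side default.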
const trainNumCluster = parseInt(nodeData.inputs?.trainNumCluster as string) || null
const maxIterNum = parseInt(nodeData.inputs?.maxIterNum as string) || null
const stopThreshold = parseFloat(nodeData.inputs?.stopThreshold as string) || null
const seed = parseInt(nodeData.inputs?.seed as string) || null
const numInit = parseInt(nodeData.inputs?.numInit as string) || null
const topK = parseInt(nodeData.inputs?.topK as string) || 10
const searchThreshold = parseFloat(nodeData.inputs?.searchThreshold as string) || null
const searchNumCluster = parseInt(nodeData.inputs?.searchNumCluster as string) || null
const efSearch = parseInt(nodeData.inputs?.efSearch as string) || null
const numLayer = parseInt(nodeData.inputs?.numLayer as string) || null
const efConstruction = parseInt(nodeData.inputs?.efConstruction as string) || null
const numConnPerNode = parseInt(nodeData.inputs?.numConnPerNode as string) || null
const maxNumConnPerNode = parseInt(nodeData.inputs?.maxNumConnPerNode as string) || null
// Parse 'true'/'false' explicitly so an explicit 'false' is kept instead of being dropped as null
const applyHeuristicsInput = (nodeData.inputs?.applyHeuristics as string)?.toLowerCase()
const applyHeuristics = applyHeuristicsInput === 'true' ? true : applyHeuristicsInput === 'false' ? false : null
const rerankWeight = parseFloat(nodeData.inputs?.rerankWeight as string) || null
const relevanceTopK = parseInt(nodeData.inputs?.relevanceTopK as string) || null
const relevanceSearchThreshold = parseFloat(nodeData.inputs?.relevanceSearchThreshold as string) || null
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
// Get authentication parameters with fallback to direct inputs
const user = getCredentialParam('tdUsername', credentialData, nodeData) || null
const password = getCredentialParam('tdPassword', credentialData, nodeData) || null
const host = getCredentialParam('tdHostIp', credentialData, nodeData) || null
const baseURL = getCredentialParam('baseURL', credentialData, nodeData) || null
// JWT authentication parameters - prioritize credential store
const providedJwtToken = getCredentialParam('jwtToken', credentialData, nodeData) || null
if (!docs || docs.length === 0) {
throw new Error('No documents provided for upsert operation')
}
if (!embeddings) {
throw new Error('Embeddings are required for upsert operation')
}
let jwtToken = null
if (providedJwtToken) {
jwtToken = providedJwtToken
}
// Generate embeddings
const embedded_vectors = await embeddings.embedDocuments(docs.map((doc) => doc.pageContent))
if (embedded_vectors.length !== docs.length) {
throw new Error('The number of embedded vectors does not match the number of documents.')
}
const embeddings_dims = embedded_vectors[0].length
// Create Teradata connection
const connection = new teradatasql.TeradataConnection()
let cur = null
let tempTableName = ''
let embeddingsTableCreated = false
try {
// Connect to Teradata
connection.connect({
host: host,
user: user,
password: password,
database: database
})
cur = connection.cursor()
// Start transaction
connection.autocommit = false
// Create temporary embeddings table with VARBYTE first
tempTableName = `${embeddingsTableName}_temp_${Date.now()}`
const createTempTableSql = `
CREATE MULTISET TABLE ${tempTableName}
(
element_id INTEGER,
chunks VARCHAR(32000) CHARACTER SET UNICODE,
embedding VARBYTE(64000)
);
`
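// Note: VARBYTE(64000) holds at most 8000 float64 values (64000 bytes / 8 bytes
// per double), so embedding models beyond 8000 dimensions would need a wider column.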
try {
cur.execute(createTempTableSql)
// Commit the DDL statement
connection.commit()
} catch (error: any) {
throw new Error(`Failed to create temporary table ${tempTableName}: ${error.message}`)
}
// Insert documents and embeddings into the temporary table using FastLoad
const insertSql = `
{fn teradata_require_fastload}INSERT INTO ${tempTableName} (?, ?, ?)`
const insertDataArr: any[][] = []
for (let i = 0; i < docs.length; i++) {
const doc = docs[i]
const embedding = embedded_vectors[i]
const elementId = i
// Convert embedding array of doubles to byte array for VARBYTE column
const embeddingBuffer = Buffer.alloc(embedding.length * 8) // 8 bytes per double
for (let j = 0; j < embedding.length; j++) {
embeddingBuffer.writeDoubleLE(embedding[j], j * 8)
}
insertDataArr.push([elementId, doc.pageContent, embeddingBuffer])
}
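// e.g. a 3-dimensional embedding [0.1, -0.2, 0.3] packs into a 24-byte little-endian
// buffer; the CAST(... AS VECTOR) load further below assumes this packed float64 layout.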
try {
cur.execute(insertSql, insertDataArr)
// Commit the insert operation
connection.commit()
} catch (error: any) {
console.error(`Failed to insert documents into temporary table: ${error.message}`)
throw error
}
// Create the final table with VECTOR datatype using the original embeddings table name
const createFinalTableSql = `
CREATE MULTISET TABLE ${embeddingsTableName}
(
element_id INTEGER,
chunks VARCHAR(32000) CHARACTER SET UNICODE,
embedding VECTOR
) no primary index;
`
try {
cur.execute(createFinalTableSql)
embeddingsTableCreated = true
// Commit the DDL statement
connection.commit()
} catch (error: any) {
throw new Error(`Failed to create final embeddings table ${embeddingsTableName}: ${error.message}`)
}
// Load data from temporary VARBYTE table to final VECTOR table with casting
const loadFinalTableSql = `
INSERT INTO ${embeddingsTableName} (element_id, chunks, embedding)
SELECT
element_id,
chunks,
CAST(embedding AS VECTOR)
FROM ${tempTableName};
`
try {
cur.execute(loadFinalTableSql)
} catch (error: any) {
console.error(`Failed to load data into final table: ${error.message}`)
throw new Error(`Failed to load data into final table: ${error.message}`)
}
// Drop the temporary table
try {
cur.execute(`DROP TABLE ${tempTableName}`)
tempTableName = '' // Clear the temp table name since it's been dropped
} catch (error: any) {
console.error(`Failed to drop temporary table: ${error.message}`)
throw new Error(`Failed to drop temporary table: ${error.message}`)
}
// Commit the transaction
connection.commit()
connection.autocommit = true // Re-enable autocommit
// Continue with the original API-based vector store upload for compatibility
const data = {
database_name: database
}
// Determine authentication method and headers
let authHeaders: Record<string, string> = {}
if (jwtToken) {
authHeaders = {
Authorization: `Bearer ${jwtToken}`,
'Content-Type': 'application/json'
}
} else {
// Encode the credentials string using Base64
const credentials: string = `${user}:${password}`
const encodedCredentials: string = Buffer.from(credentials).toString('base64')
authHeaders = {
Authorization: `Basic ${encodedCredentials}`,
'Content-Type': 'application/json'
}
}
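// e.g. Buffer.from('alice:secret').toString('base64') yields 'YWxpY2U6c2VjcmV0'
// (illustrative credentials only)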
const sessionUrl = baseURL + (baseURL.endsWith('/') ? '' : '/') + 'data-insights/api/v1/session'
const response = await fetch(sessionUrl, {
method: 'POST',
headers: authHeaders,
body: JSON.stringify(data)
})
if (!response.ok) {
throw new Error(`Failed to create session: ${response.status}`)
}
// Extract session_id from Set-Cookie header
const setCookie = response.headers.get('set-cookie')
let session_id = ''
if (setCookie) {
const match = setCookie.match(/session_id=([^;]+)/)
if (match) {
session_id = match[1]
}
}
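// Note: reading Set-Cookie via headers.get('set-cookie') works with Node's fetch
// (undici); browsers never expose this header, so this path is server-side only.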
// Utility function to filter out null/undefined values
const filterNullValues = (obj: Record<string, any>): Record<string, any> => {
return Object.fromEntries(Object.entries(obj).filter(([_, value]) => value !== null && value !== undefined))
}
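// e.g. filterNullValues({ top_k: 10, seed: null }) returns { top_k: 10 }, so
// unset optional inputs are omitted and the service falls back to its defaults.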
const vsParameters = filterNullValues({
search_algorithm: searchAlgorithm,
top_k: topK,
embeddings_dims: embeddings_dims,
metric: distanceMetric,
initial_centroids_method: initialCentroidsMethod,
train_numcluster: trainNumCluster,
max_iternum: maxIterNum,
stop_threshold: stopThreshold,
seed: seed,
num_init: numInit,
search_threshold: searchThreshold,
search_num_cluster: searchNumCluster,
ef_search: efSearch,
num_layer: numLayer,
ef_construction: efConstruction,
num_connpernode: numConnPerNode,
maxnum_connpernode: maxNumConnPerNode,
apply_heuristics: applyHeuristics,
rerank_weight: rerankWeight,
relevance_top_k: relevanceTopK,
relevance_search_threshold: relevanceSearchThreshold,
description: vectorStoreDescription
})
const vsIndex = filterNullValues({
target_database: database,
object_names: [embeddingsTableName],
key_columns: ['element_id'],
data_columns: ['embedding'],
vector_column: 'vector_index',
is_embedded: true,
is_normalized: false,
metadata_columns: ['chunks'],
metadata_descriptions: ['Content or Chunk of the document']
})
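// vs_index points the service at the table loaded above: 'embedding' already holds
// vectors (is_embedded: true) and 'chunks' rides along as a metadata column.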
const formData = new FormData()
formData.append('vs_parameters', JSON.stringify(vsParameters))
formData.append('vs_index', JSON.stringify(vsIndex))
const vectorstoresUrl =
baseURL + (baseURL.endsWith('/') ? '' : '/') + 'data-insights/api/v1/vectorstores/' + vectorStoreName
// Prepare headers for vectorstores API call
let vectorstoreHeaders: Record<string, string> = {}
if (jwtToken) {
vectorstoreHeaders = {
Authorization: `Bearer ${jwtToken}`,
Cookie: `session_id=${session_id}`
}
} else {
const credentials: string = `${user}:${password}`
const encodedCredentials: string = Buffer.from(credentials).toString('base64')
vectorstoreHeaders = {
Authorization: `Basic ${encodedCredentials}`,
Cookie: `session_id=${session_id}`
}
}
const upsertResponse = await fetch(vectorstoresUrl, {
method: 'POST',
headers: vectorstoreHeaders,
body: formData,
credentials: 'include'
})
if (!upsertResponse.ok) {
throw new Error(`Failed to upsert documents: ${upsertResponse.statusText}`)
}
return { numAdded: docs.length, addedDocs: docs as Document<Record<string, any>>[] }
} catch (e: any) {
// Rollback transaction on any error
try {
if (connection && !connection.autocommit) {
connection.rollback()
connection.autocommit = true
}
// Clean up temporary table if it exists
if (tempTableName && cur) {
try {
cur.execute(`DROP TABLE ${tempTableName}`)
} catch (cleanupError: any) {
console.warn(`Failed to clean up temporary table: ${cleanupError.message}`)
}
}
// Clean up embeddings table if it was created during this transaction
if (embeddingsTableCreated && cur) {
try {
cur.execute(`DROP TABLE ${embeddingsTableName}`)
} catch (cleanupError: any) {
console.warn(`Failed to clean up embeddings table: ${cleanupError.message}`)
}
}
} catch (rollbackError: any) {
console.error(`Failed to rollback transaction: ${rollbackError.message}`)
}
throw new Error(e.message || e)
} finally {
if (cur) {
cur.close()
}
// Close the connection
if (connection) {
connection.close()
}
}
}
}
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
const log_level = 0
const embeddings = nodeData.inputs?.embeddings as Embeddings
const vectorStoreName = nodeData.inputs?.vectorStoreName as string
const database = nodeData.inputs?.database as string
// Optional parameters for vector store configuration
const topK = parseInt(nodeData.inputs?.topK as string) || 10
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
// Get authentication parameters with fallback to direct inputs
const user = getCredentialParam('tdUsername', credentialData, nodeData) || null
const password = getCredentialParam('tdPassword', credentialData, nodeData) || null
const baseURL = getCredentialParam('baseURL', credentialData, nodeData) || null
// JWT authentication parameters - prioritize credential store
const providedJwtToken = getCredentialParam('jwtToken', credentialData, nodeData) || null
// Check if JWT authentication should be used
let jwtToken = null
if (providedJwtToken) {
jwtToken = providedJwtToken
}
// Determine authentication headers
let authHeaders: Record<string, string> = {}
if (jwtToken) {
authHeaders = {
Authorization: `Bearer ${jwtToken}`,
'Content-Type': 'application/json'
}
} else {
const credentials = `${user}:${password}`
const encodedCredentials = Buffer.from(credentials).toString('base64')
authHeaders = {
Authorization: `Basic ${encodedCredentials}`,
'Content-Type': 'application/json'
}
}
const sessionData = {
database_name: database
}
const sessionUrl = baseURL + (baseURL.endsWith('/') ? '' : '/') + 'data-insights/api/v1/session'
const sessionResponse = await fetch(sessionUrl, {
method: 'POST',
headers: authHeaders,
body: JSON.stringify(sessionData)
})
if (!sessionResponse.ok) {
throw new Error(`Failed to create session: ${sessionResponse.status}`)
}
// Extract session_id from Set-Cookie header
const setCookie = sessionResponse.headers.get('set-cookie')
let session_id = ''
if (setCookie) {
const match = setCookie.match(/session_id=([^;]+)/)
if (match) {
session_id = match[1]
}
}
// Helper function for similarity search
const performSimilaritySearch = async (query: string): Promise<Document[]> => {
try {
// Generate embeddings for the query
const queryEmbedding = await embeddings.embedQuery(query)
if (!queryEmbedding || queryEmbedding.length === 0) {
throw new Error('Failed to generate query embedding')
}
const queryEmbeddingString = queryEmbedding.join(',')
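// e.g. [0.1, -0.2] becomes the string '0.1,-0.2', which is what this code
// sends as question_vector below.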
// Prepare the search request
const searchData = {
question_vector: queryEmbeddingString
}
// Prepare headers for search API call
let searchHeaders: Record<string, string> = {}
if (jwtToken) {
searchHeaders = {
'Content-Type': 'application/json',
Authorization: `Bearer ${jwtToken}`,
Cookie: `session_id=${session_id}`
}
} else {
const credentials = `${user}:${password}`
const encodedCredentials = Buffer.from(credentials).toString('base64')
searchHeaders = {
'Content-Type': 'application/json',
Authorization: `Basic ${encodedCredentials}`,
Cookie: `session_id=${session_id}`
}
}
const searchUrl = `${baseURL}${
baseURL.endsWith('/') ? '' : '/'
}data-insights/api/v1/vectorstores/${vectorStoreName}/similarity-search?log_level=${log_level}`
const searchResponse = await fetch(searchUrl, {
method: 'POST',
headers: searchHeaders,
body: JSON.stringify(searchData),
credentials: 'include'
})
if (!searchResponse.ok) {
throw new Error(`Search failed: ${searchResponse.statusText}`)
}
const searchResults = await searchResponse.json()
return (
searchResults.similar_objects_list?.map(
(result: any) =>
new Document({
pageContent: result.chunks || '',
metadata: {
score: result.score || 0,
source: vectorStoreName,
database: result.DataBaseName,
table: result.TableName,
id: result.element_id
}
})
) || []
)
} catch (error) {
console.error('Error during similarity search:', error)
throw error
}
}
// Create vector store object following Flowise pattern
const vectorStore = {
async similaritySearch(query: string): Promise<Document[]> {
return performSimilaritySearch(query)
},
async similaritySearchWithScore(query: string): Promise<[Document, number][]> {
const docs = await performSimilaritySearch(query)
return docs.map((doc) => [doc, doc.metadata.score || 0])
},
// Add invoke method directly to vectorStore
async invoke(query: string): Promise<Document[]> {
return performSimilaritySearch(query)
},
async getRelevantDocuments(query: string): Promise<Document[]> {
return performSimilaritySearch(query)
},
async _getRelevantDocuments(query: string): Promise<Document[]> {
return performSimilaritySearch(query)
},
asRetriever() {
return {
async getRelevantDocuments(query: string): Promise<Document[]> {
return performSimilaritySearch(query)
},
async invoke(query: string): Promise<Document[]> {
return performSimilaritySearch(query)
},
async _getRelevantDocuments(query: string): Promise<Document[]> {
return performSimilaritySearch(query)
}
}
}
}
// Create retriever using the vectorStore methods
const retriever = {
async getRelevantDocuments(query: string): Promise<Document[]> {
return vectorStore.getRelevantDocuments(query)
},
async invoke(query: string): Promise<Document[]> {
return vectorStore.invoke(query)
},
async _getRelevantDocuments(query: string): Promise<Document[]> {
return vectorStore._getRelevantDocuments(query)
}
}
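// Both vectorStore and retriever duck-type the LangChain retriever surface
// (invoke / getRelevantDocuments) rather than extending the base classes;
// downstream consumers rely only on this structural typing.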
if (nodeData.outputs?.output === 'retriever') {
return retriever
} else if (nodeData.outputs?.output === 'vectorStore') {
;(vectorStore as any).k = topK
return vectorStore
}
return vectorStore
}
}
module.exports = { nodeClass: Teradata_VectorStores }

View File

@@ -1,19 +0,0 @@
<svg width="64" height="64" viewBox="0 0 64 64" fill="none" xmlns="http://www.w3.org/2000/svg">
<g filter="url(#filter0_d_15769_12621)">
<path d="M49.3232 8H14.6768C13.1984 8 12 9.19843 12 10.6768V45.3232C12 46.8016 13.1984 48 14.6768 48H49.3232C50.8016 48 52 46.8016 52 45.3232V10.6768C52 9.19843 50.8016 8 49.3232 8Z" fill="#FF5F02"/>
<path d="M25.098 32.467V15.5882H30.1292V20.2286H35.7465V24.6834H30.1292V32.467C30.1292 34.4794 31.1745 35.1364 32.6447 35.1364H35.7391V39.5863H32.6447C27.4915 39.5814 25.098 37.3369 25.098 32.467Z" fill="white"/>
<path d="M37.8688 37.376C37.8688 36.668 38.1501 35.989 38.6507 35.4884C39.1513 34.9878 39.8303 34.7066 40.5383 34.7066C41.2462 34.7066 41.9252 34.9878 42.4258 35.4884C42.9265 35.989 43.2077 36.668 43.2077 37.376C43.2077 38.084 42.9265 38.7629 42.4258 39.2636C41.9252 39.7642 41.2462 40.0454 40.5383 40.0454C39.8303 40.0454 39.1513 39.7642 38.6507 39.2636C38.1501 38.7629 37.8688 38.084 37.8688 37.376Z" fill="white"/>
</g>
<defs>
<filter id="filter0_d_15769_12621" x="0" y="0" width="64" height="64" filterUnits="userSpaceOnUse" color-interpolation-filters="sRGB">
<feFlood flood-opacity="0" result="BackgroundImageFix"/>
<feColorMatrix in="SourceAlpha" type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 127 0" result="hardAlpha"/>
<feOffset dy="4"/>
<feGaussianBlur stdDeviation="6"/>
<feComposite in2="hardAlpha" operator="out"/>
<feColorMatrix type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.12 0"/>
<feBlend mode="normal" in2="BackgroundImageFix" result="effect1_dropShadow_15769_12621"/>
<feBlend mode="normal" in="SourceGraphic" in2="effect1_dropShadow_15769_12621" result="shape"/>
</filter>
</defs>
</svg>

View File

@@ -144,7 +144,6 @@
"sanitize-filename": "^1.6.3",
"srt-parser-2": "^1.2.3",
"supergateway": "3.0.1",
"teradatasql": "^20.0.40",
"typeorm": "^0.3.6",
"weaviate-ts-client": "^1.1.0",
"winston": "^3.9.0",

View File

@@ -492,9 +492,6 @@ importers:
supergateway:
specifier: 3.0.1
version: 3.0.1(bufferutil@4.0.8)(utf-8-validate@6.0.4)
teradatasql:
specifier: ^20.0.40
version: 20.0.44
typeorm:
specifier: ^0.3.6
version: 0.3.20(ioredis@5.3.2)(mongodb@6.3.0(gcp-metadata@7.0.1)(socks@2.8.1))(mysql2@3.11.4)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2))
@@ -12793,9 +12790,6 @@
resolution: { integrity: sha512-dhG34DXATL5hSxJbIexCft8FChFXtmskoZYnoPWjXQuebWYCNkVeV3KkGegCK9CP1oswI/vQibS2GY7Em/sJJA== }
engines: { node: '>= 8' }
koffi@2.14.1:
resolution: { integrity: sha512-IMFL3IbRDXacSIjs7pPbPxgNlJ2hUtawQXU2QPdr6iw38jmv5AesAUG8HPX00xl0PPA2BbEa3noTw1YdHY+gHg== }
kuler@2.0.0:
resolution: { integrity: sha512-Xq9nH7KlWZmXAtodXDDRE7vs6DU1gTU8zYDHDiWLSip45Egwq3plLHzPn27NgvzL2r1LMPC1vdqh98sQxtqj4A== }
@@ -16948,11 +16942,6 @@
resolution: { integrity: sha512-G13vtMYPT/J8A4X2SjdtBTphZlrp1gKv6hZiOjw14RCWg6GbHuQBGtjlx75xLbYV/wEc0D7G5K4rxKP/cXk8Bw== }
engines: { node: '>=10' }
teradatasql@20.0.44:
resolution: { integrity: sha512-3OABwc9WdGwU0aM7CkgS9OLdiP0a1G5EgaE1XBgMQgXWWp9sr4hMoO7W2ygdLYMfVWHMkCj7P0xXXuH/rN52jw== }
engines: { node: '>=18.20.7' }
hasBin: true
terminal-link@2.1.1:
resolution: { integrity: sha512-un0FmiRUQNr5PJqy9kP7c40F5BOfpGlYTrxonDChEZB7pzZxRNp/bt+ymiy9/npwXya9KH99nJ/GXFIiUkYGFQ== }
engines: { node: '>=8' }
@@ -34832,8 +34821,6 @@
klona@2.0.6: {}
koffi@2.14.1: {}
kuler@2.0.0: {}
langchain@0.3.6(@langchain/anthropic@0.3.33(@langchain/core@0.3.61(openai@4.96.0(encoding@0.1.13)(ws@8.18.3(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(zod@3.22.4))(@langchain/aws@0.1.11(@langchain/core@0.3.61(openai@4.96.0(encoding@0.1.13)(ws@8.18.3(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/cohere@0.0.7(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.3(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/core@0.3.61(openai@4.96.0(encoding@0.1.13)(ws@8.18.3(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/google-genai@0.2.3(@langchain/core@0.3.61(openai@4.96.0(encoding@0.1.13)(ws@8.18.3(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(zod@3.22.4))(@langchain/google-vertexai@0.2.14(@langchain/core@0.3.61(openai@4.96.0(encoding@0.1.13)(ws@8.18.3(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/groq@0.1.2(@langchain/core@0.3.61(openai@4.96.0(encoding@0.1.13)(ws@8.18.3(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(ws@8.18.3(bufferutil@4.0.8)(utf-8-validate@6.0.4)))(@langchain/mistralai@0.2.0(@langchain/core@0.3.61(openai@4.96.0(encoding@0.1.13)(ws@8.18.3(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/ollama@0.2.0(@langchain/core@0.3.61(openai@4.96.0(encoding@0.1.13)(ws@8.18.3(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(axios@1.12.0)(cheerio@1.0.0-rc.12)(encoding@0.1.13)(handlebars@4.7.8)(openai@4.96.0(encoding@0.1.13)(ws@8.18.3(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(typeorm@0.3.20(ioredis@5.3.2)(mongodb@6.3.0(gcp-metadata@7.0.1)(socks@2.8.1))(mysql2@3.11.4)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2)))(ws@8.18.3(bufferutil@4.0.8)(utf-8-validate@6.0.4)):
@@ -39982,11 +39969,6 @@
type-fest: 0.16.0
unique-string: 2.0.0
teradatasql@20.0.44:
dependencies:
koffi: 2.14.1
uuid: 9.0.1
terminal-link@2.1.1:
dependencies:
ansi-escapes: 4.3.2