import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses } from '../../../src/utils'
import { HFInput, HuggingFaceInference } from 'langchain/llms/hf'

class HuggingFaceInference_LLMs implements INode {
    label: string
    name: string
    type: string
    icon: string
    category: string
    description: string
    baseClasses: string[]
    inputs: INodeParams[]

    constructor() {
        this.label = 'HuggingFace Inference'
        this.name = 'huggingFaceInference_LLMs'
        this.type = 'HuggingFaceInference'
        this.icon = 'huggingface.png'
        this.category = 'LLMs'
        this.description = 'Wrapper around HuggingFace large language models'
        this.baseClasses = [this.type, ...getBaseClasses(HuggingFaceInference)]
        this.inputs = [
            {
                label: 'Model',
                name: 'model',
                type: 'string',
                placeholder: 'gpt2'
            },
            {
                label: 'HuggingFace Api Key',
                name: 'apiKey',
                type: 'password'
            },
            {
                label: 'Temperature',
                name: 'temperature',
                type: 'number',
                description: 'Temperature parameter may not apply to certain models. Please check available model parameters',
                optional: true,
                additionalParams: true
            },
            {
                label: 'Max Tokens',
                name: 'maxTokens',
                type: 'number',
                description: 'Max Tokens parameter may not apply to certain models. Please check available model parameters',
                optional: true,
                additionalParams: true
            },
            {
                label: 'Top Probability',
                name: 'topP',
                type: 'number',
                description: 'Top Probability parameter may not apply to certain models. Please check available model parameters',
                optional: true,
                additionalParams: true
            },
            {
                label: 'Top K',
                name: 'hfTopK',
                type: 'number',
                description: 'Top K parameter may not apply to certain models. Please check available model parameters',
                optional: true,
                additionalParams: true
            },
            {
                label: 'Frequency Penalty',
                name: 'frequencyPenalty',
                type: 'number',
                description: 'Frequency Penalty parameter may not apply to certain models. Please check available model parameters',
                optional: true,
                additionalParams: true
            }
        ]
    }

    async init(nodeData: INodeData): Promise<any> {
        const model = nodeData.inputs?.model as string
        const apiKey = nodeData.inputs?.apiKey as string
        const temperature = nodeData.inputs?.temperature as string
        const maxTokens = nodeData.inputs?.maxTokens as string
        const topP = nodeData.inputs?.topP as string
        const hfTopK = nodeData.inputs?.hfTopK as string
        const frequencyPenalty = nodeData.inputs?.frequencyPenalty as string

        const obj: Partial<HFInput> = {
            model,
            apiKey
        }

        // Optional sampling parameters are only set when the user provided them.
        // Temperature, topP, and frequencyPenalty are floats; maxTokens and topK are integers.
        if (temperature) obj.temperature = parseFloat(temperature)
        if (maxTokens) obj.maxTokens = parseInt(maxTokens, 10)
        if (topP) obj.topP = parseFloat(topP)
        if (hfTopK) obj.topK = parseInt(hfTopK, 10)
        if (frequencyPenalty) obj.frequencyPenalty = parseFloat(frequencyPenalty)

        const huggingFace = new HuggingFaceInference(obj)
        return huggingFace
    }
}

module.exports = { nodeClass: HuggingFaceInference_LLMs }
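
/*
 * Usage sketch (not part of the original file; the shape of `nodeData` below is a
 * hand-built illustration, not the full INodeData interface). In practice the host
 * application constructs the node and calls init() with the resolved input values,
 * then invokes the returned LangChain LLM:
 *
 *   const node = new HuggingFaceInference_LLMs()
 *   const llm = await node.init({
 *       inputs: { model: 'gpt2', apiKey: 'hf_xxx', temperature: '0.7' }
 *   } as any)
 *   const answer = await llm.call('What is the capital of France?')
 */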