From cf9e3499a764233b31dc80b9d7b75a652068d4ab Mon Sep 17 00:00:00 2001 From: Henry Date: Wed, 11 Oct 2023 12:24:06 +0100 Subject: [PATCH] add ollama --- .../nodes/chatmodels/ChatOllama/ChatOllama.ts | 241 ++++++++++++++++++ .../nodes/chatmodels/ChatOllama/ollama.png | Bin 0 -> 7487 bytes .../OllamaEmbedding/OllamaEmbedding.ts | 95 +++++++ .../embeddings/OllamaEmbedding/ollama.png | Bin 0 -> 7487 bytes .../components/nodes/llms/Ollama/Ollama.ts | 241 ++++++++++++++++++ .../components/nodes/llms/Ollama/ollama.png | Bin 0 -> 7487 bytes packages/server/src/utils/index.ts | 4 +- 7 files changed, 579 insertions(+), 2 deletions(-) create mode 100644 packages/components/nodes/chatmodels/ChatOllama/ChatOllama.ts create mode 100644 packages/components/nodes/chatmodels/ChatOllama/ollama.png create mode 100644 packages/components/nodes/embeddings/OllamaEmbedding/OllamaEmbedding.ts create mode 100644 packages/components/nodes/embeddings/OllamaEmbedding/ollama.png create mode 100644 packages/components/nodes/llms/Ollama/Ollama.ts create mode 100644 packages/components/nodes/llms/Ollama/ollama.png diff --git a/packages/components/nodes/chatmodels/ChatOllama/ChatOllama.ts b/packages/components/nodes/chatmodels/ChatOllama/ChatOllama.ts new file mode 100644 index 000000000..31267743f --- /dev/null +++ b/packages/components/nodes/chatmodels/ChatOllama/ChatOllama.ts @@ -0,0 +1,241 @@ +import { INode, INodeData, INodeParams } from '../../../src/Interface' +import { getBaseClasses } from '../../../src/utils' +import { ChatOllama } from 'langchain/chat_models/ollama' +import { BaseCache } from 'langchain/schema' +import { OllamaInput } from 'langchain/dist/util/ollama' +import { BaseLLMParams } from 'langchain/llms/base' + +class ChatOllama_ChatModels implements INode { + label: string + name: string + version: number + type: string + icon: string + category: string + description: string + baseClasses: string[] + credential: INodeParams + inputs: INodeParams[] + + constructor() { + this.label = 'ChatOllama' + this.name = 'chatOllama' + this.version = 2.0 + this.type = 'ChatOllama' + this.icon = 'ollama.png' + this.category = 'Chat Models' + this.description = 'Chat completion using open-source LLM on Ollama' + this.baseClasses = [this.type, ...getBaseClasses(ChatOllama)] + this.inputs = [ + { + label: 'Cache', + name: 'cache', + type: 'BaseCache', + optional: true + }, + { + label: 'Base URL', + name: 'baseUrl', + type: 'string', + default: 'http://localhost:11434' + }, + { + label: 'Model Name', + name: 'modelName', + type: 'string', + placeholder: 'llama2' + }, + { + label: 'Temperature', + name: 'temperature', + type: 'number', + description: + 'The temperature of the model. Increasing the temperature will make the model answer more creatively. (Default: 0.8). Refer to docs for more details', + step: 0.1, + default: 0.9, + optional: true + }, + { + label: 'Top P', + name: 'topP', + type: 'number', + description: + 'Works together with top-k. A higher value (e.g., 0.95) will lead to more diverse text, while a lower value (e.g., 0.5) will generate more focused and conservative text. (Default: 0.9). Refer to docs for more details', + step: 0.1, + optional: true, + additionalParams: true + }, + { + label: 'Top K', + name: 'topK', + type: 'number', + description: + 'Reduces the probability of generating nonsense. A higher value (e.g. 100) will give more diverse answers, while a lower value (e.g. 10) will be more conservative. (Default: 40). 
Refer to docs for more details', + step: 1, + optional: true, + additionalParams: true + }, + { + label: 'Mirostat', + name: 'mirostat', + type: 'number', + description: + 'Enable Mirostat sampling for controlling perplexity. (default: 0, 0 = disabled, 1 = Mirostat, 2 = Mirostat 2.0). Refer to docs for more details', + step: 1, + optional: true, + additionalParams: true + }, + { + label: 'Mirostat ETA', + name: 'mirostatEta', + type: 'number', + description: + 'Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive. (Default: 0.1) Refer to docs for more details', + step: 0.1, + optional: true, + additionalParams: true + }, + { + label: 'Mirostat TAU', + name: 'mirostatTau', + type: 'number', + description: + 'Controls the balance between coherence and diversity of the output. A lower value will result in more focused and coherent text. (Default: 5.0) Refer to docs for more details', + step: 0.1, + optional: true, + additionalParams: true + }, + { + label: 'Context Window Size', + name: 'numCtx', + type: 'number', + description: + 'Sets the size of the context window used to generate the next token. (Default: 2048) Refer to docs for more details', + step: 1, + optional: true, + additionalParams: true + }, + { + label: 'Number of GQA groups', + name: 'numGqa', + type: 'number', + description: + 'The number of GQA groups in the transformer layer. Required for some models, for example it is 8 for llama2:70b. Refer to docs for more details', + step: 1, + optional: true, + additionalParams: true + }, + { + label: 'Number of GPU', + name: 'numGpu', + type: 'number', + description: + 'The number of layers to send to the GPU(s). On macOS it defaults to 1 to enable metal support, 0 to disable. Refer to docs for more details', + step: 1, + optional: true, + additionalParams: true + }, + { + label: 'Number of Thread', + name: 'numThread', + type: 'number', + description: + 'Sets the number of threads to use during computation. By default, Ollama will detect this for optimal performance. It is recommended to set this value to the number of physical CPU cores your system has (as opposed to the logical number of cores). Refer to docs for more details', + step: 1, + optional: true, + additionalParams: true + }, + { + label: 'Repeat Last N', + name: 'repeatLastN', + type: 'number', + description: + 'Sets how far back for the model to look back to prevent repetition. (Default: 64, 0 = disabled, -1 = num_ctx). Refer to docs for more details', + step: 1, + optional: true, + additionalParams: true + }, + { + label: 'Repeat Penalty', + name: 'repeatPenalty', + type: 'number', + description: + 'Sets how strongly to penalize repetitions. A higher value (e.g., 1.5) will penalize repetitions more strongly, while a lower value (e.g., 0.9) will be more lenient. (Default: 1.1). Refer to docs for more details', + step: 0.1, + optional: true, + additionalParams: true + }, + { + label: 'Stop Sequence', + name: 'stop', + type: 'string', + rows: 4, + placeholder: 'AI assistant:', + description: + 'Sets the stop sequences to use. Use comma to seperate different sequences. Refer to docs for more details', + optional: true, + additionalParams: true + }, + { + label: 'Tail Free Sampling', + name: 'tfsZ', + type: 'number', + description: + 'Tail free sampling is used to reduce the impact of less probable tokens from the output. 
A higher value (e.g., 2.0) will reduce the impact more, while a value of 1.0 disables this setting. (Default: 1). Refer to docs for more details', + step: 0.1, + optional: true, + additionalParams: true + } + ] + } + + async init(nodeData: INodeData): Promise { + const temperature = nodeData.inputs?.temperature as string + const baseUrl = nodeData.inputs?.baseUrl as string + const modelName = nodeData.inputs?.modelName as string + const topP = nodeData.inputs?.topP as string + const topK = nodeData.inputs?.topK as string + const mirostat = nodeData.inputs?.mirostat as string + const mirostatEta = nodeData.inputs?.mirostatEta as string + const mirostatTau = nodeData.inputs?.mirostatTau as string + const numCtx = nodeData.inputs?.numCtx as string + const numGqa = nodeData.inputs?.numGqa as string + const numGpu = nodeData.inputs?.numGpu as string + const numThread = nodeData.inputs?.numThread as string + const repeatLastN = nodeData.inputs?.repeatLastN as string + const repeatPenalty = nodeData.inputs?.repeatPenalty as string + const stop = nodeData.inputs?.stop as string + const tfsZ = nodeData.inputs?.tfsZ as string + + const cache = nodeData.inputs?.cache as BaseCache + + const obj: OllamaInput & BaseLLMParams = { + baseUrl, + temperature: parseFloat(temperature), + model: modelName + } + + if (topP) obj.topP = parseFloat(topP) + if (topK) obj.topK = parseFloat(topK) + if (mirostat) obj.mirostat = parseFloat(mirostat) + if (mirostatEta) obj.mirostatEta = parseFloat(mirostatEta) + if (mirostatTau) obj.mirostatTau = parseFloat(mirostatTau) + if (numCtx) obj.numCtx = parseFloat(numCtx) + if (numGqa) obj.numGqa = parseFloat(numGqa) + if (numGpu) obj.numGpu = parseFloat(numGpu) + if (numThread) obj.numThread = parseFloat(numThread) + if (repeatLastN) obj.repeatLastN = parseFloat(repeatLastN) + if (repeatPenalty) obj.repeatPenalty = parseFloat(repeatPenalty) + if (tfsZ) obj.tfsZ = parseFloat(tfsZ) + if (stop) { + const stopSequences = stop.split(',') + obj.stop = stopSequences + } + if (cache) obj.cache = cache + + const model = new ChatOllama(obj) + return model + } +} + +module.exports = { nodeClass: ChatOllama_ChatModels } diff --git a/packages/components/nodes/chatmodels/ChatOllama/ollama.png b/packages/components/nodes/chatmodels/ChatOllama/ollama.png new file mode 100644 index 0000000000000000000000000000000000000000..8cd2cf1ed8043caf62e8b069330889c0cf0f5a3b GIT binary patch literal 7487 zcmai(cQhQ#_wb2?NFj+R!76Dw(R+;$5ky$MWU&$5vU=}Q7Ktuv^Ibd! 
z*G*}ZfXvOC%Mp>ifBpflNPA~G=O}w9?b9?8u~@Ap8_7g|29TWJ(yP#&k>|{@U)FzZ^pahqKD+UXo6Gws=UOG`e|I!Rh(o9uY1{WV3XylHuwm1LhG{6Dx_JXHb`cf(1GQ2yL9t`u^@EW zK{nKSZg=XEKEML#TU!`sJutaAr?39$5pkOf__$8@H(xI#H92x!MeF^5OF<@r*?*rQ zqg+Mmx0Y>v;g9Qn4dC71%y)+3B*o0KcOapyQ#G;3KO^kKzC1?v_XbVT14jngB2~dp zz@a+%F~!^d?lG8dmRokq`ABlj^%D(dsuN(CDZ>HC);MZDhKu=WUFAT|s))qMpZY}f z-K5h5FWwPWZ+p{G_M6lC?W0HL@oJwNO(&#B-&E?5?mx+H0xFY6hMl~miaasg@1Qbc zD|(%`{R~Nw_RheE@`oWuuGj7IH2={3LmDK-N;^_;{}D0sbT<@NyT}gsoyP9<^$o%owr9u-MIS^$r@m8v z!?NLFvtN5{1KUA}2{#u<4ug0)rCUXLi9Yh%4t(k|q5|gyM*S9!Lr|Y1^a6g8oNv*$ zIVCkTm8Jj1R=PV4e`68{jv8s+%XAk^Ka>}g!UVM z9t#bIGVqw~I`{pdqo}GiWLY%X+YRze#ceh|Z%#^oqR#jLdOBzp&=QET_j8-t8<9DB zQ`ra~nJLm1PGFw70s;md`YDt?83Z}vDMhH7elu@BeRc^-y`F4qou6Xgo<#yMd}3PI zxILKfh`Rk;Ervx`7(}VSqL%q&ChOzN(^ZP|8l&FpfO7*ez4a=ck^UH1*{D53XEX{* zD{I;t>OSm$GB`rC=Y@6lC1?ErN)@&B()xEv{g^|R&^f#!!ypF9hmG|eVzU}U$0+><6G0p(c@P0E& z@0_B1N_aILH6f_fwAXh_EWx29ODSKIWKn0*6pXoXP@~3TCA-QbB1vC`3-CZS#>nu; zUau|vHe<5*>)_8<;usd3!rxiytk*rY4gK&!T-T`i_@@!3rI%YndEAhB#pv?9ilnB0L9O@E+GVGIj~|Me zLtf9cceR?&7Daa4-~vB0a1+hNk8ltx{*$-V9CDDaU-m&`vr(0KTXA>Si$3G;v?5WG zfT~xi`<1H_$!b$DyUP;dbrhGPIs7ANbQB&no!O`%R>1K+O8DkKk>RyxH`NmG66h=E ziQ5%dvF2J6{_|f&J3&W$Guiors|!+FvS!Pt%Bf!J$?tPwRuN0HZ$&9qe$xEQb-jIo z3AX1=(+ZR9C6zVge^oH%l$$3I2UBa{S^h?WLg=lFN93(1NH*Ju>B$mDibWTPiMz3{ zIH*@{${Io^g!ci8)LUJK>3C%ezmm*px9Kpb9B7MwgT53U&CCHnMGb7uY+kOKi5a|P z*~ncY{|sFtJ(<)JBVQAHcCHUJv`k%P6ESuYU^;S1{;`*n-$I+}9q7+n%apQrBTA4? zvs}Y&QP;XwQG5lPS!b!l?U{^r-ncHQDYJq>)}+J5;0?thdd<0opKTt&cY@3cv}~Y^ z8$MROHW8^jbCi%)1&);tWJTWZ)T1)Fn0J%%-H`AQ%>u@kX@mab92~l@KRDY_J+hWk zVC5p+)Jh17+o+l#gvlyKm5Hrf^UGf8-T<65w10_C#24JHeLV<#-#Tn+6(HQvIkUqk z*~d5#@+?<!BMtVEq4VOR3BXRREJBbn53(y;HWbb-I8;F3~`cv*HFE^$aR zdhYk8ZA(xp21y1@;`6z&Q1$U`YeFaF4ZuJ&^di{NAk3`I0@S4B{`UtydzIu|Hv3!l zQLV*of&^Fs{SW}&n>5cYnR_O(nCJo*O{YL54E&M8H7ioWn$7sIhGgEa2Zt5kgua&4 zeq{D_gM< z$8O{yas!`VnXi;?aS_w~Y%N~MycKI?J3My2GBmO=tqeVUvgkW0_v$y3&o^dkiIsM5 z)&t*HtpJQlx5295+wel(?cx>r-K@9ZB+e2e1{E>;9?df zg3=R*tt_7}vG(H*y3piDuD_W6q3l21V0y$;-Bg0@hAjM-a9Hi_^@=mk1p9kWj-p-Q zo&zvZu@fWj&1bfDF<90v$c6#2EWym@vp3ov&Vxl;j`sg!T-yZWWhA2jDF5Jn@2my- z^{OsmsH9CgEA%0%fyswe^Qg$@V6Xv1(Xk zmRId29T8Is$@zB|FRfNyz1+sbP>T|)25ur+RWq6XnfAYV|A+QgSVCaT9I3ogYji}s zRhx$#ocVuPrPR~o-L|eEpz0ppjTI_jSKI#= zIkogp6*iUGAVj`z*rAAkjU`B|-qG+Ij0ozEjxR;mTzSchp#bz;8AXYDE^ugp6l3+T zN<@Qkao|;{p$E7-!Y7Dx)UMAG>*_y$UK4xmL{Swz^|E-+Lhis@yWU`jm(yPBYLWKK z6n4_bg9N_Z19%pPQIta7m#lHTE>^d?&Fxb6V3docs for more details', + step: 1, + optional: true, + additionalParams: true + }, + { + label: 'Number of Thread', + name: 'numThread', + type: 'number', + description: + 'Sets the number of threads to use during computation. By default, Ollama will detect this for optimal performance. It is recommended to set this value to the number of physical CPU cores your system has (as opposed to the logical number of cores). 
Refer to docs for more details', + step: 1, + optional: true, + additionalParams: true + }, + { + label: 'Use MMap', + name: 'useMMap', + type: 'boolean', + optional: true, + additionalParams: true + } + ] + } + + async init(nodeData: INodeData): Promise { + const modelName = nodeData.inputs?.modelName as string + const baseUrl = nodeData.inputs?.baseUrl as string + const numThread = nodeData.inputs?.numThread as string + const numGpu = nodeData.inputs?.numGpu as string + const useMMap = nodeData.inputs?.useMMap as boolean + + const obj = { + model: modelName, + baseUrl, + requestOptions: {} + } + + const requestOptions: OllamaInput = {} + if (numThread) requestOptions.numThread = parseFloat(numThread) + if (numGpu) requestOptions.numGpu = parseFloat(numGpu) + if (useMMap !== undefined) requestOptions.useMMap = useMMap + + if (Object.keys(requestOptions).length) obj.requestOptions = requestOptions + + const model = new OllamaEmbeddings(obj) + return model + } +} + +module.exports = { nodeClass: OllamaEmbedding_Embeddings } diff --git a/packages/components/nodes/embeddings/OllamaEmbedding/ollama.png b/packages/components/nodes/embeddings/OllamaEmbedding/ollama.png new file mode 100644 index 0000000000000000000000000000000000000000..8cd2cf1ed8043caf62e8b069330889c0cf0f5a3b GIT binary patch literal 7487 zcmai(cQhQ#_wb2?NFj+R!76Dw(R+;$5ky$MWU&$5vU=}Q7Ktuv^Ibd! zxAJZ-ZRbe*`$rax?}8NG4P1{`AyZMlL3{7|Y1c@PQ2)T})(rBm_oyhmtiJ|$sJH1< zs3AUc-gh)!=-NMN?bHQf#{U1?T4$TC)~9j3Fxfiddhl4&0~I*!Nf(Oe_TK`)FBluUSUW zoT=0BHlm$^);*S-+Ctzb-^N|!R+XmChcgDOVMjmd9_EZMMi~1sEH&pSXrq#5QR@!R zHzs_d6h#mIV~UGA?}lv-^~Y&`{*8PWix2e-RVJ>@zJK$$z{{!8;&0GKFl(!%l;8o&JCaD4yhSdf9sMR#Sk1 z9+SH!`2jIL{&1TXD1W6dfSbMG&q@g?{?t0D-gYSG%1Cj;r*k!Q=YxrgC`HeqS<1VK zk*2pn3Fg{s=trZ;h2h(;W8?meL-q!xyUI0X^AaNs~Qj^MM& zR~RAm_&>-P+5&#s+u{TRXGW-;F7d|uDI7j}_t%V)fXG}8zA(bt4rFx6Q4;upA;%T= zsTk`I6Ma3@Fz}r5M0g6-3decHw_)mtxZX1ZKTrR{@Zg99V(eK$;O=2w^_g0rzY&(R zy@4&~OvQT}(tsVBG=8`=*+G1=F5Xfl3K-B@j}=+;X5NE-MMs5iimA zgA+gQ;6ClYq!Emh4q~$D1*kp0B#%GEp1rZZW8pN<3=P5+UrNVCRhW-Kt}ktYzeMav znLo-`8VTTMqVJ>v`4w~8-|lG9=3}uw#GEHp>aK3=Ewq}wo6xxy5JlarQaC5DwsUyI z<(g7uLg=P%n$3||i6-1k*-@vAwojEunW^Bumo|uOq>B`ABz-08o@*?j?(!FL5_C*H z(+4^6ZVcYCM5BvFXc__Eh%EW!%?PkPb{nm6*K%8XlH6^J6zr0_wWUkWU@rQxuAwAL zmt2u`F%eybK8xnQyVSoR?KFe`t4=HMC4s@cfNx8Wacnb#&*yBiwN(x{5g+DfNSt`~ zN6jw~=O(}^gqUs6G_Q(-jckf8ZAo>+$}WLSm1R*-{&-lgw0|^PlzlugI!soR7&-&9 z1U7zP$va+{H4!Vok9QR;i4nzxxUm%0uB!bNbzAXw5Fh8EMuhjN6hX@RvVh{U zU3|9d9Q+4?8Lax<=oq`-1$zeT92N1?aS_Ig2A!ggHA{k@Ya>rS64>=6hcU{qfULif zH)8A_#on6Mt@rIk%B(HD3mz78G3RR;aa>J+^Z6}$Y>+o7MYC#6Z@aV{^k(z5_OH%Et zp*)(A$H1_2<*W#Lja`<7x6z%biOL>P$BLWv$;KnpIF%xW70K~%cxgj^$&BvD2!ObZ zhGCSw@q{35dfWtTxIBpbiCCnoAXbmA-5l-J#I*^~-AZ}lBTVog zQ?W*M;`7~h5ZSZxA@;y-Rv57gR#+pBl$!#SeP6J7x64bVmv0F0Wb8?*s!hTI-v;i)8Iq+^r3s>sd` zO_?8t@wquocIOB10x7;$MUDGaMYg#n>nWn(4;yOZ#%vQUs`Z> zy3DLa4tL~B8jIs1+{k^PqBX>uU7PCKvmI)L<)*@BHt!QUCe_*YAvTOi21rtLaT?E{ z5nlG5KhmM0c&z&7_T#RjqIWgwLvr9}; zH5rvkUeohC4Zwq5*mh%<%-x(}i3BG$>j&)oY_lFUp|HQ)UfX^qUW~8r)Q)#1L#tB} zd##<=_+;7mWu7oTF*}5OuGhBEXLFCJb?}Ol7C0DBVD=}o5+j|e(-YINL ztzf&*1>Q1mn(Qrgju^lhkj=%syDt!q?|eZJp$_)@@v$9QF%MtkS_k;LipS$aU6VGv z+tU13^aQmg(l~ToN|-sQ64*`Nm)QQ*Xycxc%-B|6JROXJ`B{z6B)8qVv{(A$6oa4m z5T-Ei^Yw3M`u2$b4H|RD9Q6Ecg501cV{y!R(3A(g)$@#js!qtzyp6-%vYj&jFcuJR z%%;5jEf1IN!%~}jRU^ILw4NF0X6DreW{PTq4{qABC3Px9!yOVjno!gP#pNNMsnnopR1i4Y~4uwX0mJQ84AU69@2 z^nj;Ku2L!^GNm5xWUe#Hs7A%65lVwXM&Nmr3KnWHU5HGAT`MFUQfV&(yJTCNi0gLn z=f~!g2W!b?doGoK%cI9(oE9ZUo6U#?3zNZDNUs(CfuyFJ3DPq%w}FJ?xm{o~txF)i zLvX|1QiB0OfBqMrPd09T=?q5}TCAH%YdU^HTub<)<@+7?%v73urd*hZ=_i}5om4PN 
zvs}Y&QP;XwQG5lPS!b!l?U{^r-ncHQDYJq>)}+J5;0?thdd<0opKTt&cY@3cv}~Y^ z8$MROHW8^jbCi%)1&);tWJTWZ)T1)Fn0J%%-H`AQ%>u@kX@mab92~l@KRDY_J+hWk zVC5p+)Jh17+o+l#gvlyKm5Hrf^UGf8-T<65w10_C#24JHeLV<#-#Tn+6(HQvIkUqk z*~d5#@+?<!BMtVEq4VOR3BXRREJBbn53(y;HWbb-I8;F3~`cv*HFE^$aR zdhYk8ZA(xp21y1@;`6z&Q1$U`YeFaF4ZuJ&^di{NAk3`I0@S4B{`UtydzIu|Hv3!l zQLV*of&^Fs{SW}&n>5cYnR_O(nCJo*O{YL54E&M8H7ioWn$7sIhGgEa2Zt5kgua&4 zeq{D_gM< z$8O{yas!`VnXi;?aS_w~Y%N~MycKI?J3My2GBmO=tqeVUvgkW0_v$y3&o^dkiIsM5 z)&t*HtpJQlx5295+wel(?cx>r-K@9ZB+e2e1{E>;9?df zg3=R*tt_7}vG(H*y3piDuD_W6q3l21V0y$;-Bg0@hAjM-a9Hi_^@=mk1p9kWj-p-Q zo&zvZu@fWj&1bfDF<90v$c6#2EWym@vp3ov&Vxl;j`sg!T-yZWWhA2jDF5Jn@2my- z^{OsmsH9CgEA%0%fyswe^Qg$@V6Xv1(Xk zmRId29T8Is$@zB|FRfNyz1+sbP>T|)25ur+RWq6XnfAYV|A+QgSVCaT9I3ogYji}s zRhx$#ocVuPrPR~o-L|eEpz0ppjTI_jSKI#= zIkogp6*iUGAVj`z*rAAkjU`B|-qG+Ij0ozEjxR;mTzSchp#bz;8AXYDE^ugp6l3+T zN<@Qkao|;{p$E7-!Y7Dx)UMAG>*_y$UK4xmL{Swz^|E-+Lhis@yWU`jm(yPBYLWKK z6n4_bg9N_Z19%pPQIta7m#lHTE>^d?&Fxb6V3docs for more details', + step: 0.1, + default: 0.9, + optional: true + }, + { + label: 'Top P', + name: 'topP', + type: 'number', + description: + 'Works together with top-k. A higher value (e.g., 0.95) will lead to more diverse text, while a lower value (e.g., 0.5) will generate more focused and conservative text. (Default: 0.9). Refer to docs for more details', + step: 0.1, + optional: true, + additionalParams: true + }, + { + label: 'Top K', + name: 'topK', + type: 'number', + description: + 'Reduces the probability of generating nonsense. A higher value (e.g. 100) will give more diverse answers, while a lower value (e.g. 10) will be more conservative. (Default: 40). Refer to docs for more details', + step: 1, + optional: true, + additionalParams: true + }, + { + label: 'Mirostat', + name: 'mirostat', + type: 'number', + description: + 'Enable Mirostat sampling for controlling perplexity. (default: 0, 0 = disabled, 1 = Mirostat, 2 = Mirostat 2.0). Refer to docs for more details', + step: 1, + optional: true, + additionalParams: true + }, + { + label: 'Mirostat ETA', + name: 'mirostatEta', + type: 'number', + description: + 'Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive. (Default: 0.1) Refer to docs for more details', + step: 0.1, + optional: true, + additionalParams: true + }, + { + label: 'Mirostat TAU', + name: 'mirostatTau', + type: 'number', + description: + 'Controls the balance between coherence and diversity of the output. A lower value will result in more focused and coherent text. (Default: 5.0) Refer to docs for more details', + step: 0.1, + optional: true, + additionalParams: true + }, + { + label: 'Context Window Size', + name: 'numCtx', + type: 'number', + description: + 'Sets the size of the context window used to generate the next token. (Default: 2048) Refer to docs for more details', + step: 1, + optional: true, + additionalParams: true + }, + { + label: 'Number of GQA groups', + name: 'numGqa', + type: 'number', + description: + 'The number of GQA groups in the transformer layer. Required for some models, for example it is 8 for llama2:70b. Refer to docs for more details', + step: 1, + optional: true, + additionalParams: true + }, + { + label: 'Number of GPU', + name: 'numGpu', + type: 'number', + description: + 'The number of layers to send to the GPU(s). On macOS it defaults to 1 to enable metal support, 0 to disable. 
Refer to docs for more details', + step: 1, + optional: true, + additionalParams: true + }, + { + label: 'Number of Thread', + name: 'numThread', + type: 'number', + description: + 'Sets the number of threads to use during computation. By default, Ollama will detect this for optimal performance. It is recommended to set this value to the number of physical CPU cores your system has (as opposed to the logical number of cores). Refer to docs for more details', + step: 1, + optional: true, + additionalParams: true + }, + { + label: 'Repeat Last N', + name: 'repeatLastN', + type: 'number', + description: + 'Sets how far back for the model to look back to prevent repetition. (Default: 64, 0 = disabled, -1 = num_ctx). Refer to docs for more details', + step: 1, + optional: true, + additionalParams: true + }, + { + label: 'Repeat Penalty', + name: 'repeatPenalty', + type: 'number', + description: + 'Sets how strongly to penalize repetitions. A higher value (e.g., 1.5) will penalize repetitions more strongly, while a lower value (e.g., 0.9) will be more lenient. (Default: 1.1). Refer to docs for more details', + step: 0.1, + optional: true, + additionalParams: true + }, + { + label: 'Stop Sequence', + name: 'stop', + type: 'string', + rows: 4, + placeholder: 'AI assistant:', + description: + 'Sets the stop sequences to use. Use comma to seperate different sequences. Refer to docs for more details', + optional: true, + additionalParams: true + }, + { + label: 'Tail Free Sampling', + name: 'tfsZ', + type: 'number', + description: + 'Tail free sampling is used to reduce the impact of less probable tokens from the output. A higher value (e.g., 2.0) will reduce the impact more, while a value of 1.0 disables this setting. (Default: 1). Refer to docs for more details', + step: 0.1, + optional: true, + additionalParams: true + } + ] + } + + async init(nodeData: INodeData): Promise { + const temperature = nodeData.inputs?.temperature as string + const baseUrl = nodeData.inputs?.baseUrl as string + const modelName = nodeData.inputs?.modelName as string + const topP = nodeData.inputs?.topP as string + const topK = nodeData.inputs?.topK as string + const mirostat = nodeData.inputs?.mirostat as string + const mirostatEta = nodeData.inputs?.mirostatEta as string + const mirostatTau = nodeData.inputs?.mirostatTau as string + const numCtx = nodeData.inputs?.numCtx as string + const numGqa = nodeData.inputs?.numGqa as string + const numGpu = nodeData.inputs?.numGpu as string + const numThread = nodeData.inputs?.numThread as string + const repeatLastN = nodeData.inputs?.repeatLastN as string + const repeatPenalty = nodeData.inputs?.repeatPenalty as string + const stop = nodeData.inputs?.stop as string + const tfsZ = nodeData.inputs?.tfsZ as string + + const cache = nodeData.inputs?.cache as BaseCache + + const obj: OllamaInput & BaseLLMParams = { + baseUrl, + temperature: parseFloat(temperature), + model: modelName + } + + if (topP) obj.topP = parseFloat(topP) + if (topK) obj.topK = parseFloat(topK) + if (mirostat) obj.mirostat = parseFloat(mirostat) + if (mirostatEta) obj.mirostatEta = parseFloat(mirostatEta) + if (mirostatTau) obj.mirostatTau = parseFloat(mirostatTau) + if (numCtx) obj.numCtx = parseFloat(numCtx) + if (numGqa) obj.numGqa = parseFloat(numGqa) + if (numGpu) obj.numGpu = parseFloat(numGpu) + if (numThread) obj.numThread = parseFloat(numThread) + if (repeatLastN) obj.repeatLastN = parseFloat(repeatLastN) + if (repeatPenalty) obj.repeatPenalty = parseFloat(repeatPenalty) + if (tfsZ) obj.tfsZ = 
parseFloat(tfsZ) + if (stop) { + const stopSequences = stop.split(',') + obj.stop = stopSequences + } + if (cache) obj.cache = cache + + const model = new Ollama(obj) + return model + } +} + +module.exports = { nodeClass: Ollama_LLMs } diff --git a/packages/components/nodes/llms/Ollama/ollama.png b/packages/components/nodes/llms/Ollama/ollama.png new file mode 100644 index 0000000000000000000000000000000000000000..8cd2cf1ed8043caf62e8b069330889c0cf0f5a3b GIT binary patch literal 7487 zcmai(cQhQ#_wb2?NFj+R!76Dw(R+;$5ky$MWU&$5vU=}Q7Ktuv^Ibd! zxAJZ-ZRbe*`$rax?}8NG4P1{`AyZMlL3{7|Y1c@PQ2)T})(rBm_oyhmtiJ|$sJH1< zs3AUc-gh)!=-NMN?bHQf#{U1?T4$TC)~9j3Fxfiddhl4&0~I*!Nf(Oe_TK`)FBluUSUW zoT=0BHlm$^);*S-+Ctzb-^N|!R+XmChcgDOVMjmd9_EZMMi~1sEH&pSXrq#5QR@!R zHzs_d6h#mIV~UGA?}lv-^~Y&`{*8PWix2e-RVJ>@zJK$$z{{!8;&0GKFl(!%l;8o&JCaD4yhSdf9sMR#Sk1 z9+SH!`2jIL{&1TXD1W6dfSbMG&q@g?{?t0D-gYSG%1Cj;r*k!Q=YxrgC`HeqS<1VK zk*2pn3Fg{s=trZ;h2h(;W8?meL-q!xyUI0X^AaNs~Qj^MM& zR~RAm_&>-P+5&#s+u{TRXGW-;F7d|uDI7j}_t%V)fXG}8zA(bt4rFx6Q4;upA;%T= zsTk`I6Ma3@Fz}r5M0g6-3decHw_)mtxZX1ZKTrR{@Zg99V(eK$;O=2w^_g0rzY&(R zy@4&~OvQT}(tsVBG=8`=*+G1=F5Xfl3K-B@j}=+;X5NE-MMs5iimA zgA+gQ;6ClYq!Emh4q~$D1*kp0B#%GEp1rZZW8pN<3=P5+UrNVCRhW-Kt}ktYzeMav znLo-`8VTTMqVJ>v`4w~8-|lG9=3}uw#GEHp>aK3=Ewq}wo6xxy5JlarQaC5DwsUyI z<(g7uLg=P%n$3||i6-1k*-@vAwojEunW^Bumo|uOq>B`ABz-08o@*?j?(!FL5_C*H z(+4^6ZVcYCM5BvFXc__Eh%EW!%?PkPb{nm6*K%8XlH6^J6zr0_wWUkWU@rQxuAwAL zmt2u`F%eybK8xnQyVSoR?KFe`t4=HMC4s@cfNx8Wacnb#&*yBiwN(x{5g+DfNSt`~ zN6jw~=O(}^gqUs6G_Q(-jckf8ZAo>+$}WLSm1R*-{&-lgw0|^PlzlugI!soR7&-&9 z1U7zP$va+{H4!Vok9QR;i4nzxxUm%0uB!bNbzAXw5Fh8EMuhjN6hX@RvVh{U zU3|9d9Q+4?8Lax<=oq`-1$zeT92N1?aS_Ig2A!ggHA{k@Ya>rS64>=6hcU{qfULif zH)8A_#on6Mt@rIk%B(HD3mz78G3RR;aa>J+^Z6}$Y>+o7MYC#6Z@aV{^k(z5_OH%Et zp*)(A$H1_2<*W#Lja`<7x6z%biOL>P$BLWv$;KnpIF%xW70K~%cxgj^$&BvD2!ObZ zhGCSw@q{35dfWtTxIBpbiCCnoAXbmA-5l-J#I*^~-AZ}lBTVog zQ?W*M;`7~h5ZSZxA@;y-Rv57gR#+pBl$!#SeP6J7x64bVmv0F0Wb8?*s!hTI-v;i)8Iq+^r3s>sd` zO_?8t@wquocIOB10x7;$MUDGaMYg#n>nWn(4;yOZ#%vQUs`Z> zy3DLa4tL~B8jIs1+{k^PqBX>uU7PCKvmI)L<)*@BHt!QUCe_*YAvTOi21rtLaT?E{ z5nlG5KhmM0c&z&7_T#RjqIWgwLvr9}; zH5rvkUeohC4Zwq5*mh%<%-x(}i3BG$>j&)oY_lFUp|HQ)UfX^qUW~8r)Q)#1L#tB} zd##<=_+;7mWu7oTF*}5OuGhBEXLFCJb?}Ol7C0DBVD=}o5+j|e(-YINL ztzf&*1>Q1mn(Qrgju^lhkj=%syDt!q?|eZJp$_)@@v$9QF%MtkS_k;LipS$aU6VGv z+tU13^aQmg(l~ToN|-sQ64*`Nm)QQ*Xycxc%-B|6JROXJ`B{z6B)8qVv{(A$6oa4m z5T-Ei^Yw3M`u2$b4H|RD9Q6Ecg501cV{y!R(3A(g)$@#js!qtzyp6-%vYj&jFcuJR z%%;5jEf1IN!%~}jRU^ILw4NF0X6DreW{PTq4{qABC3Px9!yOVjno!gP#pNNMsnnopR1i4Y~4uwX0mJQ84AU69@2 z^nj;Ku2L!^GNm5xWUe#Hs7A%65lVwXM&Nmr3KnWHU5HGAT`MFUQfV&(yJTCNi0gLn z=f~!g2W!b?doGoK%cI9(oE9ZUo6U#?3zNZDNUs(CfuyFJ3DPq%w}FJ?xm{o~txF)i zLvX|1QiB0OfBqMrPd09T=?q5}TCAH%YdU^HTub<)<@+7?%v73urd*hZ=_i}5om4PN z)aYniHn&7WS5u0J?{q<_P@iUmvdm_c%slkBV*$D9SbiP5bQ;Ca#V|(m@g3Mve)G&x zp|;d;c9To;{Zu3Nimp&Qida$FLZ+=K3}TLe5u+III>kHmL|z9!AaXva+0#9J4WhnY9znXyZAhznU{B+x zK{y~F1*c34-DNb6LBAQAV$@4|DE8i0T%J9c#Mp$h%1v#D@aW2#>-yHorTdo1$!QaM zzW_x`DU3Ib3VeN=2ciU!MhTr_a>8fZgW;?NQm#KnTfP*pH^h-U4O};(JPv2bl^w9P zwyiY4bl1YCv_{d&m14cyZK_QE+blqeN9p{rg)bTEZjKdWnz%PkLTj<0@v13@AV@7| zhU1b=5VEEAmp{9_Z~0CHs&=!qeFCBB^L=67;qvGXF={wU|LkDntI$ZD@~CqXTGn_^ zt-(+XdDG~n&ulLAAo8fHey2?IDqX9b>F4`TK5}q=grBtd0(XW!^`?2rpN@!kk&h1v z=$K6~Rh(o(OV#F^EM?Um3}LweX7kPfHaTYVRUX~H6{TsjW@V+_fh3-6FKoWC3;5c{ zvSUVyT`*ME>ErYV@}!q?PD?1>feY+sjo7rEmZ^r4MAX}TE5G;JREgpjDul*e&JVLLU#E7R@uV6Xb`1uV z72S#PX*rlm==7hLYk$-}Y815${*-9%NKslv&Wd9{?5X?NvUC*eZrt0ZceY#O6mxt3;99cHf*#R=^%YRQ>m5w zEf)N@Zu1?X!-XLfFlN{p4GXJ(J8yREK#eYrtADD&8c-3ZpOdwf3!9hsaow|+$`S{6g)|`#o!{Co>K@f3>eXcU 
diff --git a/packages/server/src/utils/index.ts b/packages/server/src/utils/index.ts
--- a/packages/server/src/utils/index.ts
+++ b/packages/server/src/utils/index.ts
@@ ... @@
     const streamAvailableLLMs = {
-        'Chat Models': ['azureChatOpenAI', 'chatOpenAI', 'chatAnthropic'],
-        LLMs: ['azureOpenAI', 'openAI']
+        'Chat Models': ['azureChatOpenAI', 'chatOpenAI', 'chatAnthropic', 'chatOllama'],
+        LLMs: ['azureOpenAI', 'openAI', 'ollama']
     }
     let isChatOrLLMsExist = false
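
Note for reviewers (not part of the patch): all three new nodes reduce to building the corresponding LangChain Ollama class from the node's string inputs. Below is a minimal sketch of what ChatOllama_ChatModels.init() ends up constructing. It assumes an Ollama server running at the node's default Base URL (http://localhost:11434) with the llama2 model already pulled, and the LangChain JS message API ('langchain/schema') that this package already imports elsewhere in the patch.

    // Illustrative sketch only — mirrors the options object assembled in ChatOllama_ChatModels.init()
    import { ChatOllama } from 'langchain/chat_models/ollama'
    import { HumanMessage } from 'langchain/schema'

    const model = new ChatOllama({
        baseUrl: 'http://localhost:11434', // node default for Base URL
        model: 'llama2',                   // the node's Model Name placeholder
        temperature: 0.9,                  // node default; numeric inputs are parsed with parseFloat before being passed through
        topK: 40                           // optional sampling parameter, same treatment as above
    })

    async function main() {
        // BaseChatModel.call returns the assistant message for the given chat turn
        const res = await model.call([new HumanMessage('Why is the sky blue?')])
        console.log(res.content)
    }
    main()

The embeddings node follows the same pattern, except that numThread, numGpu and useMMap are nested under requestOptions before being handed to OllamaEmbeddings.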