From e0a0a8976db2bb4cbf060f54ab2f2598292c2ace Mon Sep 17 00:00:00 2001
From: Timothy Carambat <rambat1010@gmail.com>
Date: Wed, 27 Dec 2023 17:21:47 -0800
Subject: [PATCH] Add Ollama as LLM provider option (#494)

* Add support for Ollama as LLM provider
resolves #493
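
Example local configuration (values mirror server/.env.example and the UI
placeholder; adjust the host and model to your setup):

  LLM_PROVIDER='ollama'
  OLLAMA_BASE_PATH='http://127.0.0.1:11434'
  OLLAMA_MODEL_PREF='llama2'
  OLLAMA_MODEL_TOKEN_LIMIT=4096

One way to confirm the Ollama server is reachable and to list the pulled
models (the same endpoint the model dropdown uses):

  curl http://127.0.0.1:11434/api/tags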
---
 .vscode/settings.json                         |   1 +
 README.md                                     |   1 +
 docker/.env.example                           |   5 +
 .../LLMSelection/OllamaLLMOptions/index.jsx   | 120 ++++++++++
 frontend/src/media/llmprovider/ollama.png     | Bin 0 -> 23630 bytes
 .../GeneralSettings/LLMPreference/index.jsx   |  14 ++
 .../Steps/DataHandling/index.jsx              |   8 +
 .../Steps/LLMSelection/index.jsx              |  24 +-
 server/.env.example                           |   5 +
 server/models/systemSettings.js               |  14 ++
 server/utils/AiProviders/ollama/index.js      | 208 ++++++++++++++++++
 server/utils/chats/stream.js                  |  29 +++
 server/utils/helpers/customModels.js          |  35 ++-
 server/utils/helpers/index.js                 |   3 +
 server/utils/helpers/updateENV.js             |  25 +++
 15 files changed, 486 insertions(+), 6 deletions(-)
 create mode 100644 frontend/src/components/LLMSelection/OllamaLLMOptions/index.jsx
 create mode 100644 frontend/src/media/llmprovider/ollama.png
 create mode 100644 server/utils/AiProviders/ollama/index.js

diff --git a/.vscode/settings.json b/.vscode/settings.json
index dde2d134b..459f57fc3 100644
--- a/.vscode/settings.json
+++ b/.vscode/settings.json
@@ -1,5 +1,6 @@
 {
   "cSpell.words": [
+    "Ollama",
     "openai",
     "Qdrant",
     "Weaviate"
diff --git a/README.md b/README.md
index 44e0557fa..36127cb35 100644
--- a/README.md
+++ b/README.md
@@ -59,6 +59,7 @@ Some cool features of AnythingLLM
 - [Azure OpenAI](https://azure.microsoft.com/en-us/products/ai-services/openai-service)
 - [Anthropic ClaudeV2](https://www.anthropic.com/)
 - [Google Gemini Pro](https://ai.google.dev/)
+- [Ollama (chat models)](https://ollama.ai/)
 - [LM Studio (all models)](https://lmstudio.ai)
 - [LocalAi (all models)](https://localai.io/)
 
diff --git a/docker/.env.example b/docker/.env.example
index cc9fa06fc..0db90aa23 100644
--- a/docker/.env.example
+++ b/docker/.env.example
@@ -35,6 +35,11 @@ GID='1000'
 # LOCAL_AI_MODEL_TOKEN_LIMIT=4096
 # LOCAL_AI_API_KEY="sk-123abc"
 
+# LLM_PROVIDER='ollama'
+# OLLAMA_BASE_PATH='http://host.docker.internal:11434'
+# OLLAMA_MODEL_PREF='llama2'
+# OLLAMA_MODEL_TOKEN_LIMIT=4096
+
 ###########################################
 ######## Embedding API SELECTION ##########
 ###########################################
diff --git a/frontend/src/components/LLMSelection/OllamaLLMOptions/index.jsx b/frontend/src/components/LLMSelection/OllamaLLMOptions/index.jsx
new file mode 100644
index 000000000..a2034bf75
--- /dev/null
+++ b/frontend/src/components/LLMSelection/OllamaLLMOptions/index.jsx
@@ -0,0 +1,120 @@
+import { useEffect, useState } from "react";
+import System from "@/models/system";
+
+export default function OllamaLLMOptions({ settings }) {
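+  // Track the in-progress input separately from the committed base path so the
+  // model list is only re-fetched when the field loses focus (onBlur), not on
+  // every keystroke.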
+  const [basePathValue, setBasePathValue] = useState(
+    settings?.OllamaLLMBasePath
+  );
+  const [basePath, setBasePath] = useState(settings?.OllamaLLMBasePath);
+
+  return (
+    <div className="w-full flex flex-col gap-y-4">
+      <div className="w-full flex items-center gap-4">
+        <div className="flex flex-col w-60">
+          <label className="text-white text-sm font-semibold block mb-4">
+            Ollama Base URL
+          </label>
+          <input
+            type="url"
+            name="OllamaLLMBasePath"
+            className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
+            placeholder="http://127.0.0.1:11434"
+            defaultValue={settings?.OllamaLLMBasePath}
+            required={true}
+            autoComplete="off"
+            spellCheck={false}
+            onChange={(e) => setBasePathValue(e.target.value)}
+            onBlur={() => setBasePath(basePathValue)}
+          />
+        </div>
+        <OllamaLLMModelSelection settings={settings} basePath={basePath} />
+        <div className="flex flex-col w-60">
+          <label className="text-white text-sm font-semibold block mb-4">
+            Token context window
+          </label>
+          <input
+            type="number"
+            name="OllamaLLMTokenLimit"
+            className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
+            placeholder="4096"
+            min={1}
+            onScroll={(e) => e.target.blur()}
+            defaultValue={settings?.OllamaLLMTokenLimit}
+            required={true}
+            autoComplete="off"
+          />
+        </div>
+      </div>
+    </div>
+  );
+}
+
+function OllamaLLMModelSelection({ settings, basePath = null }) {
+  const [customModels, setCustomModels] = useState([]);
+  const [loading, setLoading] = useState(true);
+
+  useEffect(() => {
+    async function findCustomModels() {
+      if (!basePath) {
+        setCustomModels([]);
+        setLoading(false);
+        return;
+      }
+      setLoading(true);
+      const { models } = await System.customModels("ollama", null, basePath);
+      setCustomModels(models || []);
+      setLoading(false);
+    }
+    findCustomModels();
+  }, [basePath]);
+
+  if (loading || customModels.length === 0) {
+    return (
+      <div className="flex flex-col w-60">
+        <label className="text-white text-sm font-semibold block mb-4">
+          Chat Model Selection
+        </label>
+        <select
+          name="OllamaLLMModelPref"
+          disabled={true}
+          className="bg-zinc-900 border border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
+        >
+          <option disabled={true} selected={true}>
+            {!!basePath
+              ? "-- loading available models --"
+              : "-- waiting for URL --"}
+          </option>
+        </select>
+      </div>
+    );
+  }
+
+  return (
+    <div className="flex flex-col w-60">
+      <label className="text-white text-sm font-semibold block mb-4">
+        Chat Model Selection
+      </label>
+      <select
+        name="OllamaLLMModelPref"
+        required={true}
+        className="bg-zinc-900 border border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
+      >
+        {customModels.length > 0 && (
+          <optgroup label="Your loaded models">
+            {customModels.map((model) => {
+              return (
+                <option
+                  key={model.id}
+                  value={model.id}
+                  selected={settings.OllamaLLMModelPref === model.id}
+                >
+                  {model.id}
+                </option>
+              );
+            })}
+          </optgroup>
+        )}
+      </select>
+    </div>
+  );
+}
diff --git a/frontend/src/media/llmprovider/ollama.png b/frontend/src/media/llmprovider/ollama.png
new file mode 100644
index 0000000000000000000000000000000000000000..2a898a6ebbd85026715453aed13885742cbb7712
GIT binary patch (base85-encoded PNG data for ollama.png, 23630 bytes, omitted)

diff --git a/frontend/src/pages/GeneralSettings/LLMPreference/index.jsx b/frontend/src/pages/GeneralSettings/LLMPreference/index.jsx
index a0169fe15..0cecaa4d1 100644
--- a/frontend/src/pages/GeneralSettings/LLMPreference/index.jsx
+++ b/frontend/src/pages/GeneralSettings/LLMPreference/index.jsx
@@ -8,6 +8,7 @@ import OpenAiLogo from "@/media/llmprovider/openai.png";
 import AzureOpenAiLogo from "@/media/llmprovider/azure.png";
 import AnthropicLogo from "@/media/llmprovider/anthropic.png";
 import GeminiLogo from "@/media/llmprovider/gemini.png";
+import OllamaLogo from "@/media/llmprovider/ollama.png";
 import LMStudioLogo from "@/media/llmprovider/lmstudio.png";
 import LocalAiLogo from "@/media/llmprovider/localai.png";
 import PreLoader from "@/components/Preloader";
@@ -19,6 +20,7 @@ import LMStudioOptions from "@/components/LLMSelection/LMStudioOptions";
 import LocalAiOptions from "@/components/LLMSelection/LocalAiOptions";
 import NativeLLMOptions from "@/components/LLMSelection/NativeLLMOptions";
 import GeminiLLMOptions from "@/components/LLMSelection/GeminiLLMOptions";
+import OllamaLLMOptions from "@/components/LLMSelection/OllamaLLMOptions";
 
 export default function GeneralLLMPreference() {
   const [saving, setSaving] = useState(false);
@@ -163,6 +165,15 @@ export default function GeneralLLMPreference() {
                   image={LocalAiLogo}
                   onClick={updateLLMChoice}
                 />
+                <LLMProviderOption
+                  name="Ollama"
+                  value="ollama"
+                  link="ollama.ai"
+                  description="Run LLMs locally on your own machine."
+                  checked={llmChoice === "ollama"}
+                  image={OllamaLogo}
+                  onClick={updateLLMChoice}
+                />
                 {!window.location.hostname.includes("useanything.com") && (
                   <LLMProviderOption
                     name="Custom Llama Model"
@@ -193,6 +204,9 @@ export default function GeneralLLMPreference() {
                 {llmChoice === "localai" && (
                   <LocalAiOptions settings={settings} showAlert={true} />
                 )}
+                {llmChoice === "ollama" && (
+                  <OllamaLLMOptions settings={settings} />
+                )}
                 {llmChoice === "native" && (
                   <NativeLLMOptions settings={settings} />
                 )}
diff --git a/frontend/src/pages/OnboardingFlow/OnboardingModal/Steps/DataHandling/index.jsx b/frontend/src/pages/OnboardingFlow/OnboardingModal/Steps/DataHandling/index.jsx
index cd63d74d8..81b93c5dc 100644
--- a/frontend/src/pages/OnboardingFlow/OnboardingModal/Steps/DataHandling/index.jsx
+++ b/frontend/src/pages/OnboardingFlow/OnboardingModal/Steps/DataHandling/index.jsx
@@ -5,6 +5,7 @@ import OpenAiLogo from "@/media/llmprovider/openai.png";
 import AzureOpenAiLogo from "@/media/llmprovider/azure.png";
 import AnthropicLogo from "@/media/llmprovider/anthropic.png";
 import GeminiLogo from "@/media/llmprovider/gemini.png";
+import OllamaLogo from "@/media/llmprovider/ollama.png";
 import LMStudioLogo from "@/media/llmprovider/lmstudio.png";
 import LocalAiLogo from "@/media/llmprovider/localai.png";
 import ChromaLogo from "@/media/vectordbs/chroma.png";
@@ -61,6 +62,13 @@ const LLM_SELECTION_PRIVACY = {
     ],
     logo: LocalAiLogo,
   },
+  ollama: {
+    name: "Ollama",
+    description: [
+      "Your model and chats are only accessible on the machine running Ollama models",
+    ],
+    logo: OllamaLogo,
+  },
   native: {
     name: "Custom Llama Model",
     description: [
diff --git a/frontend/src/pages/OnboardingFlow/OnboardingModal/Steps/LLMSelection/index.jsx b/frontend/src/pages/OnboardingFlow/OnboardingModal/Steps/LLMSelection/index.jsx
index f877e31db..850dea3c2 100644
--- a/frontend/src/pages/OnboardingFlow/OnboardingModal/Steps/LLMSelection/index.jsx
+++ b/frontend/src/pages/OnboardingFlow/OnboardingModal/Steps/LLMSelection/index.jsx
@@ -4,6 +4,7 @@ import OpenAiLogo from "@/media/llmprovider/openai.png";
 import AzureOpenAiLogo from "@/media/llmprovider/azure.png";
 import AnthropicLogo from "@/media/llmprovider/anthropic.png";
 import GeminiLogo from "@/media/llmprovider/gemini.png";
+import OllamaLogo from "@/media/llmprovider/ollama.png";
 import LMStudioLogo from "@/media/llmprovider/lmstudio.png";
 import LocalAiLogo from "@/media/llmprovider/localai.png";
 import System from "@/models/system";
@@ -16,6 +17,7 @@ import LMStudioOptions from "@/components/LLMSelection/LMStudioOptions";
 import LocalAiOptions from "@/components/LLMSelection/LocalAiOptions";
 import NativeLLMOptions from "@/components/LLMSelection/NativeLLMOptions";
 import GeminiLLMOptions from "@/components/LLMSelection/GeminiLLMOptions";
+import OllamaLLMOptions from "@/components/LLMSelection/OllamaLLMOptions";
 
 function LLMSelection({ nextStep, prevStep, currentStep }) {
   const [llmChoice, setLLMChoice] = useState("openai");
@@ -124,13 +126,24 @@ function LLMSelection({ nextStep, prevStep, currentStep }) {
               onClick={updateLLMChoice}
             />
             <LLMProviderOption
-              name="Custom Llama Model"
-              value="native"
-              description="Use a downloaded custom Llama model for chatting on this AnythingLLM instance."
-              checked={llmChoice === "native"}
-              image={AnythingLLMIcon}
+              name="Ollama"
+              value="ollama"
+              link="ollama.ai"
+              description="Run LLMs locally on your own machine."
+              checked={llmChoice === "ollama"}
+              image={OllamaLogo}
               onClick={updateLLMChoice}
             />
+            {!window.location.hostname.includes("useanything.com") && (
+              <LLMProviderOption
+                name="Custom Llama Model"
+                value="native"
+                description="Use a downloaded custom Llama model for chatting on this AnythingLLM instance."
+                checked={llmChoice === "native"}
+                image={AnythingLLMIcon}
+                onClick={updateLLMChoice}
+              />
+            )}
           </div>
           <div className="mt-4 flex flex-wrap gap-4 max-w-[752px]">
             {llmChoice === "openai" && <OpenAiOptions settings={settings} />}
@@ -143,6 +156,7 @@ function LLMSelection({ nextStep, prevStep, currentStep }) {
               <LMStudioOptions settings={settings} />
             )}
             {llmChoice === "localai" && <LocalAiOptions settings={settings} />}
+            {llmChoice === "ollama" && <OllamaLLMOptions settings={settings} />}
             {llmChoice === "native" && <NativeLLMOptions settings={settings} />}
           </div>
         </div>
diff --git a/server/.env.example b/server/.env.example
index f73e0e083..07abed62f 100644
--- a/server/.env.example
+++ b/server/.env.example
@@ -32,6 +32,11 @@ JWT_SECRET="my-random-string-for-seeding" # Please generate random string at lea
 # LOCAL_AI_MODEL_TOKEN_LIMIT=4096
 # LOCAL_AI_API_KEY="sk-123abc"
 
+# LLM_PROVIDER='ollama'
+# OLLAMA_BASE_PATH='http://host.docker.internal:11434'
+# OLLAMA_MODEL_PREF='llama2'
+# OLLAMA_MODEL_TOKEN_LIMIT=4096
+
 ###########################################
 ######## Embedding API SELECTION ##########
 ###########################################
diff --git a/server/models/systemSettings.js b/server/models/systemSettings.js
index b5dfeb700..a66f93e19 100644
--- a/server/models/systemSettings.js
+++ b/server/models/systemSettings.js
@@ -126,6 +126,20 @@ const SystemSettings = {
             AzureOpenAiEmbeddingModelPref: process.env.EMBEDDING_MODEL_PREF,
           }
         : {}),
+
+      ...(llmProvider === "ollama"
+        ? {
+            OllamaLLMBasePath: process.env.OLLAMA_BASE_PATH,
+            OllamaLLMModelPref: process.env.OLLAMA_MODEL_PREF,
+            OllamaLLMTokenLimit: process.env.OLLAMA_MODEL_TOKEN_LIMIT,
+
+            // For embedding credentials when ollama is selected.
+            OpenAiKey: !!process.env.OPEN_AI_KEY,
+            AzureOpenAiEndpoint: process.env.AZURE_OPENAI_ENDPOINT,
+            AzureOpenAiKey: !!process.env.AZURE_OPENAI_KEY,
+            AzureOpenAiEmbeddingModelPref: process.env.EMBEDDING_MODEL_PREF,
+          }
+        : {}),
       ...(llmProvider === "native"
         ? {
             NativeLLMModelPref: process.env.NATIVE_LLM_MODEL_PREF,
diff --git a/server/utils/AiProviders/ollama/index.js b/server/utils/AiProviders/ollama/index.js
new file mode 100644
index 000000000..3aa58f760
--- /dev/null
+++ b/server/utils/AiProviders/ollama/index.js
@@ -0,0 +1,208 @@
+const { chatPrompt } = require("../../chats");
+
+// Docs: https://github.com/jmorganca/ollama/blob/main/docs/api.md
+class OllamaAILLM {
+  constructor(embedder = null) {
+    if (!process.env.OLLAMA_BASE_PATH)
+      throw new Error("No Ollama Base Path was set.");
+
+    this.basePath = process.env.OLLAMA_BASE_PATH;
+    this.model = process.env.OLLAMA_MODEL_PREF;
+    this.limits = {
+      history: this.promptWindowLimit() * 0.15,
+      system: this.promptWindowLimit() * 0.15,
+      user: this.promptWindowLimit() * 0.7,
+    };
+
+    if (!embedder)
+      throw new Error(
+        "INVALID OLLAMA SETUP. No embedding engine has been set. Go to instance settings and set up an embedding interface to use Ollama as your LLM."
+      );
+    this.embedder = embedder;
+  }
+
+  streamingEnabled() {
+    return "streamChat" in this && "streamGetChatCompletion" in this;
+  }
+
+  // Ensure the user set a value for the token limit;
+  // if undefined, assume a 4096-token window.
+  promptWindowLimit() {
+    const limit = process.env.OLLAMA_MODEL_TOKEN_LIMIT || 4096;
+    if (!limit || isNaN(Number(limit)))
+      throw new Error("No Ollama token context limit was set.");
+    return Number(limit);
+  }
+
+  async isValidChatCompletionModel(_ = "") {
+    return true;
+  }
+
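+  // Fold the system prompt and retrieved context texts into one system message,
+  // then append prior chat history and the new user prompt.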
+  constructPrompt({
+    systemPrompt = "",
+    contextTexts = [],
+    chatHistory = [],
+    userPrompt = "",
+  }) {
+    const prompt = {
+      role: "system",
+      content: `${systemPrompt}
+Context:
+    ${contextTexts
+      .map((text, i) => {
+        return `[CONTEXT ${i}]:\n${text}\n[END CONTEXT ${i}]\n\n`;
+      })
+      .join("")}`,
+    };
+    return [prompt, ...chatHistory, { role: "user", content: userPrompt }];
+  }
+
+  async isSafe(_input = "") {
+    // Not implemented so must be stubbed
+    return { safe: true, reasons: [] };
+  }
+
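+  // Non-streaming chat. POSTs the compressed message history to Ollama's
+  // /api/chat endpoint with `stream: false` so the reply arrives as a single
+  // JSON payload whose message.content holds the full response text.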
+  async sendChat(chatHistory = [], prompt, workspace = {}, rawHistory = []) {
+    const textResponse = await fetch(`${this.basePath}/api/chat`, {
+      method: "POST",
+      headers: {
+        "Content-Type": "application/json",
+      },
+      body: JSON.stringify({
+        model: this.model,
+        stream: false,
+        options: {
+          temperature: Number(workspace?.openAiTemp ?? 0.7),
+        },
+        messages: await this.compressMessages(
+          {
+            systemPrompt: chatPrompt(workspace),
+            userPrompt: prompt,
+            chatHistory,
+          },
+          rawHistory
+        ),
+      }),
+    })
+      .then((res) => {
+        if (!res.ok)
+          throw new Error(`Ollama:sendChat ${res.status} ${res.statusText}`);
+        return res.json();
+      })
+      .then((data) => data?.message?.content)
+      .catch((e) => {
+        console.error(e);
+        throw new Error(`Ollama::sendChat failed with: ${e.message}`);
+      });
+
+    if (!textResponse?.length)
+      throw new Error(`Ollama::sendChat text response was empty.`);
+
+    return textResponse;
+  }
+
+  async streamChat(chatHistory = [], prompt, workspace = {}, rawHistory = []) {
+    const response = await fetch(`${this.basePath}/api/chat`, {
+      method: "POST",
+      headers: {
+        "Content-Type": "application/json",
+      },
+      body: JSON.stringify({
+        model: this.model,
+        stream: true,
+        options: {
+          temperature: Number(workspace?.openAiTemp ?? 0.7),
+        },
+        messages: await this.compressMessages(
+          {
+            systemPrompt: chatPrompt(workspace),
+            userPrompt: prompt,
+            chatHistory,
+          },
+          rawHistory
+        ),
+      }),
+    }).catch((e) => {
+      console.error(e);
+      throw new Error(`Ollama:streamChat ${e.message}`);
+    });
+
+    return { type: "ollamaStream", response };
+  }
+
+  async getChatCompletion(messages = null, { temperature = 0.7 }) {
+    const textResponse = await fetch(`${this.basePath}/api/chat`, {
+      method: "POST",
+      headers: {
+        "Content-Type": "application/json",
+      },
+      body: JSON.stringify({
+        model: this.model,
+        messages,
+        stream: false,
+        options: {
+          temperature,
+        },
+      }),
+    })
+      .then((res) => {
+        if (!res.ok)
+          throw new Error(
+            `Ollama:getChatCompletion ${res.status} ${res.statusText}`
+          );
+        return res.json();
+      })
+      .then((data) => data?.message?.content)
+      .catch((e) => {
+        console.error(e);
+        throw new Error(
+          `Ollama::getChatCompletion failed with: ${e.message}`
+        );
+      });
+
+    if (!textResponse?.length)
+      throw new Error(`Ollama::getChatCompletion text response was empty.`);
+
+    return textResponse;
+  }
+
+  async streamGetChatCompletion(messages = null, { temperature = 0.7 }) {
+    const response = await fetch(`${this.basePath}/api/chat`, {
+      method: "POST",
+      headers: {
+        "Content-Type": "application/json",
+      },
+      body: JSON.stringify({
+        model: this.model,
+        stream: true,
+        messages,
+        options: {
+          temperature,
+        },
+      }),
+    }).catch((e) => {
+      console.error(e);
+      throw new Error(`Ollama:streamGetChatCompletion ${e.message}`);
+    });
+
+    return { type: "ollamaStream", response };
+  }
+
+  // Simple wrapper for dynamic embedder & normalize interface for all LLM implementations
+  async embedTextInput(textInput) {
+    return await this.embedder.embedTextInput(textInput);
+  }
+  async embedChunks(textChunks = []) {
+    return await this.embedder.embedChunks(textChunks);
+  }
+
+  async compressMessages(promptArgs = {}, rawHistory = []) {
+    const { messageArrayCompressor } = require("../../helpers/chat");
+    const messageArray = this.constructPrompt(promptArgs);
+    return await messageArrayCompressor(this, messageArray, rawHistory);
+  }
+}
+
+module.exports = {
+  OllamaAILLM,
+};
diff --git a/server/utils/chats/stream.js b/server/utils/chats/stream.js
index 5bdb7a1f0..b0dc9186b 100644
--- a/server/utils/chats/stream.js
+++ b/server/utils/chats/stream.js
@@ -199,6 +199,7 @@ async function streamEmptyEmbeddingChat({
   return;
 }
 
+// TODO: Refactor this implementation
 function handleStreamResponses(response, stream, responseProps) {
   const { uuid = uuidv4(), sources = [] } = responseProps;
 
@@ -231,6 +232,34 @@ function handleStreamResponses(response, stream, responseProps) {
     });
   }
 
+  if (stream?.type === "ollamaStream") {
+    return new Promise(async (resolve) => {
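+      // Ollama streams newline-delimited JSON objects. This loop assumes each
+      // body chunk parses as a single complete object whose message.content is
+      // the next piece of the reply.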
+      let fullText = "";
+      for await (const dataChunk of stream.response.body) {
+        const chunk = JSON.parse(Buffer.from(dataChunk).toString());
+        fullText += chunk.message.content;
+        writeResponseChunk(response, {
+          uuid,
+          sources: [],
+          type: "textResponseChunk",
+          textResponse: chunk.message.content,
+          close: false,
+          error: false,
+        });
+      }
+
+      writeResponseChunk(response, {
+        uuid,
+        sources,
+        type: "textResponseChunk",
+        textResponse: "",
+        close: true,
+        error: false,
+      });
+      resolve(fullText);
+    });
+  }
+
   // If stream is not a regular OpenAI Stream (like if using native model)
   // we can just iterate the stream content instead.
   if (!stream.hasOwnProperty("data")) {
diff --git a/server/utils/helpers/customModels.js b/server/utils/helpers/customModels.js
index 3b4397c31..5bd7b299e 100644
--- a/server/utils/helpers/customModels.js
+++ b/server/utils/helpers/customModels.js
@@ -1,4 +1,4 @@
-const SUPPORT_CUSTOM_MODELS = ["openai", "localai", "native-llm"];
+const SUPPORT_CUSTOM_MODELS = ["openai", "localai", "ollama", "native-llm"];
 
 async function getCustomModels(provider = "", apiKey = null, basePath = null) {
   if (!SUPPORT_CUSTOM_MODELS.includes(provider))
@@ -9,6 +9,8 @@ async function getCustomModels(provider = "", apiKey = null, basePath = null) {
       return await openAiModels(apiKey);
     case "localai":
       return await localAIModels(basePath, apiKey);
+    case "ollama":
+      return await ollamaAIModels(basePath, apiKey);
     case "native-llm":
       return nativeLLMModels();
     default:
@@ -59,6 +61,37 @@ async function localAIModels(basePath = null, apiKey = null) {
   return { models, error: null };
 }
 
+async function ollamaAIModels(basePath = null, _apiKey = null) {
+  let url;
+  try {
+    new URL(basePath);
+    if (basePath.endsWith("/"))
+      throw new Error("Base path cannot end in /!");
+    url = basePath;
+  } catch {
+    return { models: [], error: "Not a valid URL." };
+  }
+
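+  // Ollama lists locally pulled models at /api/tags; map each entry to
+  // { id: model.name } so the result matches the shape other providers return.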
+  const models = await fetch(`${url}/api/tags`)
+    .then((res) => {
+      if (!res.ok)
+        throw new Error(`Could not reach Ollama server! ${res.status}`);
+      return res.json();
+    })
+    .then((data) => data?.models || [])
+    .then((models) =>
+      models.map((model) => {
+        return { id: model.name };
+      })
+    )
+    .catch((e) => {
+      console.error(e);
+      return [];
+    });
+
+  return { models, error: null };
+}
+
 function nativeLLMModels() {
   const fs = require("fs");
   const path = require("path");
diff --git a/server/utils/helpers/index.js b/server/utils/helpers/index.js
index 115df4003..bde5e8a0a 100644
--- a/server/utils/helpers/index.js
+++ b/server/utils/helpers/index.js
@@ -43,6 +43,9 @@ function getLLMProvider() {
     case "localai":
       const { LocalAiLLM } = require("../AiProviders/localAi");
       return new LocalAiLLM(embedder);
+    case "ollama":
+      const { OllamaAILLM } = require("../AiProviders/ollama");
+      return new OllamaAILLM(embedder);
     case "native":
       const { NativeLLM } = require("../AiProviders/native");
       return new NativeLLM(embedder);
diff --git a/server/utils/helpers/updateENV.js b/server/utils/helpers/updateENV.js
index fe4f4f5c9..11278f97f 100644
--- a/server/utils/helpers/updateENV.js
+++ b/server/utils/helpers/updateENV.js
@@ -81,6 +81,19 @@ const KEY_MAPPING = {
     checks: [],
   },
 
+  OllamaLLMBasePath: {
+    envKey: "OLLAMA_BASE_PATH",
+    checks: [isNotEmpty, validOllamaLLMBasePath],
+  },
+  OllamaLLMModelPref: {
+    envKey: "OLLAMA_MODEL_PREF",
+    checks: [],
+  },
+  OllamaLLMTokenLimit: {
+    envKey: "OLLAMA_MODEL_TOKEN_LIMIT",
+    checks: [nonZero],
+  },
+
   // Native LLM Settings
   NativeLLMModelPref: {
     envKey: "NATIVE_LLM_MODEL_PREF",
@@ -208,6 +221,17 @@ function validLLMExternalBasePath(input = "") {
   }
 }
 
+function validOllamaLLMBasePath(input = "") {
+  try {
+    new URL(input);
+    if (input.endsWith("/"))
+      return "URL cannot end with a slash";
+    return null;
+  } catch {
+    return "Not a valid URL";
+  }
+}
+
 function supportedLLM(input = "") {
   return [
     "openai",
@@ -216,6 +240,7 @@ function supportedLLM(input = "") {
     "gemini",
     "lmstudio",
     "localai",
+    "ollama",
     "native",
   ].includes(input);
 }
-- 
GitLab