From 80ced5eba4f6eab477e8921de943cc5b88a681b6 Mon Sep 17 00:00:00 2001
From: Sean Hatfield <seanhatfield5@gmail.com>
Date: Thu, 22 Feb 2024 12:48:57 -0800
Subject: [PATCH] [FEAT] PerplexityAI Support (#778)

* add LLM support for perplexity

* update README & example env

* fix ENV keys in example env files

* slight changes for QA of perplexity support

* Update Perplexity AI name

---------

Co-authored-by: timothycarambat <rambat1010@gmail.com>
---
 README.md                                     |   5 +-
 docker/.env.example                           |   4 +
 .../LLMSelection/PerplexityOptions/index.jsx  |  88 ++++++++
 frontend/src/media/llmprovider/perplexity.png | Bin 0 -> 15863 bytes
 .../GeneralSettings/LLMPreference/index.jsx   |  11 +
 .../Steps/DataHandling/index.jsx              |   9 +
 .../Steps/LLMPreference/index.jsx             |  12 +-
 server/.env.example                           |   4 +
 server/models/systemSettings.js               |  12 ++
 server/utils/AiProviders/perplexity/index.js  | 204 ++++++++++++++++++
 server/utils/AiProviders/perplexity/models.js |  49 +++++
 .../AiProviders/perplexity/scripts/.gitignore |   1 +
 .../perplexity/scripts/chat_models.txt        |  11 +
 .../AiProviders/perplexity/scripts/parse.mjs  |  44 ++++
 server/utils/helpers/customModels.js          |  18 ++
 server/utils/helpers/index.js                 |   3 +
 server/utils/helpers/updateENV.js             |  11 +
 17 files changed, 483 insertions(+), 3 deletions(-)
 create mode 100644 frontend/src/components/LLMSelection/PerplexityOptions/index.jsx
 create mode 100644 frontend/src/media/llmprovider/perplexity.png
 create mode 100644 server/utils/AiProviders/perplexity/index.js
 create mode 100644 server/utils/AiProviders/perplexity/models.js
 create mode 100644 server/utils/AiProviders/perplexity/scripts/.gitignore
 create mode 100644 server/utils/AiProviders/perplexity/scripts/chat_models.txt
 create mode 100644 server/utils/AiProviders/perplexity/scripts/parse.mjs

diff --git a/README.md b/README.md
index ff50a8587..200355707 100644
--- a/README.md
+++ b/README.md
@@ -71,6 +71,7 @@ Some cool features of AnythingLLM
 - [LM Studio (all models)](https://lmstudio.ai)
 - [LocalAi (all models)](https://localai.io/)
 - [Together AI (chat models)](https://www.together.ai/)
+- [Perplexity (chat models)](https://www.perplexity.ai/)
 - [Mistral](https://mistral.ai/)
 
 **Supported Embedding models:**
@@ -108,8 +109,8 @@ Mintplex Labs & the community maintain a number of deployment methods, scripts,
 |----------------------------------------|----:|-----|---------------|------------|
 | [![Deploy on Docker][docker-btn]][docker-deploy] | [![Deploy on AWS][aws-btn]][aws-deploy] | [![Deploy on GCP][gcp-btn]][gcp-deploy] | [![Deploy on DigitalOcean][do-btn]][aws-deploy] | [![Deploy on Render.com][render-btn]][render-deploy] |
 
-| Railway |
-|----------------------------------------|
+| Railway                                             |
+| --------------------------------------------------- |
 | [![Deploy on Railway][railway-btn]][railway-deploy] |
 
 [or set up a production AnythingLLM instance without Docker →](./BARE_METAL.md)
diff --git a/docker/.env.example b/docker/.env.example
index b14d3c6ed..eed505782 100644
--- a/docker/.env.example
+++ b/docker/.env.example
@@ -48,6 +48,10 @@ GID='1000'
 # MISTRAL_API_KEY='example-mistral-ai-api-key'
 # MISTRAL_MODEL_PREF='mistral-tiny'
 
+# LLM_PROVIDER='perplexity'
+# PERPLEXITY_API_KEY='my-perplexity-key'
+# PERPLEXITY_MODEL_PREF='codellama-34b-instruct'
+
 # LLM_PROVIDER='huggingface'
 # HUGGING_FACE_LLM_ENDPOINT=https://uuid-here.us-east-1.aws.endpoints.huggingface.cloud
 # HUGGING_FACE_LLM_API_KEY=hf_xxxxxx
diff --git a/frontend/src/components/LLMSelection/PerplexityOptions/index.jsx b/frontend/src/components/LLMSelection/PerplexityOptions/index.jsx
new file mode 100644
index 000000000..0b392cf41
--- /dev/null
+++ b/frontend/src/components/LLMSelection/PerplexityOptions/index.jsx
@@ -0,0 +1,88 @@
+import System from "@/models/system";
+import { useState, useEffect } from "react";
+
+export default function PerplexityOptions({ settings }) {
+  return (
+    <div className="flex gap-x-4">
+      <div className="flex flex-col w-60">
+        <label className="text-white text-sm font-semibold block mb-4">
+          Perplexity API Key
+        </label>
+        <input
+          type="password"
+          name="PerplexityApiKey"
+          className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
+          placeholder="Perplexity API Key"
+          defaultValue={settings?.PerplexityApiKey ? "*".repeat(20) : ""}
+          required={true}
+          autoComplete="off"
+          spellCheck={false}
+        />
+      </div>
+      <PerplexityModelSelection settings={settings} />
+    </div>
+  );
+}
+
+function PerplexityModelSelection({ settings }) {
+  const [customModels, setCustomModels] = useState([]);
+  const [loading, setLoading] = useState(true);
+
+  useEffect(() => {
+    async function findCustomModels() {
+      setLoading(true);
+      const { models } = await System.customModels("perplexity");
+      setCustomModels(models || []);
+      setLoading(false);
+    }
+    findCustomModels();
+  }, []);
+
+  if (loading || customModels.length == 0) {
+    return (
+      <div className="flex flex-col w-60">
+        <label className="text-white text-sm font-semibold block mb-4">
+          Chat Model Selection
+        </label>
+        <select
+          name="PerplexityModelPref"
+          disabled={true}
+          className="bg-zinc-900 border border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
+        >
+          <option disabled={true} selected={true}>
+            -- loading available models --
+          </option>
+        </select>
+      </div>
+    );
+  }
+
+  return (
+    <div className="flex flex-col w-60">
+      <label className="text-white text-sm font-semibold block mb-4">
+        Chat Model Selection
+      </label>
+      <select
+        name="PerplexityModelPref"
+        required={true}
+        className="bg-zinc-900 border border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
+      >
+        {customModels.length > 0 && (
+          <optgroup label="Available Perplexity Models">
+            {customModels.map((model) => {
+              return (
+                <option
+                  key={model.id}
+                  value={model.id}
+                  selected={settings?.PerplexityModelPref === model.id}
+                >
+                  {model.id}
+                </option>
+              );
+            })}
+          </optgroup>
+        )}
+      </select>
+    </div>
+  );
+}
diff --git a/frontend/src/media/llmprovider/perplexity.png b/frontend/src/media/llmprovider/perplexity.png
new file mode 100644
index 0000000000000000000000000000000000000000..f4767169a12cbe2189eab51d8c59deafe4ae5b33
GIT binary patch
literal 15863
zcmeAS@N?(olHy`uVBq!ia0y~yVDtiE4mJh`hH6bQRt5$J&H|6fVg?4jBOuH;Rhv(m
zfq}uM#5JPCIX^cyHLrxBqR1-6%Ei&GFt^gwurM#w%Fse5%gWTyz|!2**vQh<#KOST
z+{nVjNT<Zg$i%?N$jHdR{cp4;0|U$F%#etZ2wxwo<osN{#FYG`RK1Ga0tOJUsj#ZZ
zEyztRNmQuF&B-gas<2fDtFX!|wgL(3tJ`rYK!HtBN}5%WiyKsFQA(PvdPYe}ft9{~
zd3m{Bxv^e;QM$gNrKP35fswwEk#12+nr?ArUP)qwZeFpnx*Zp)Q7);)$wiq3C7Jno
z3Lv8slk!VTY}M7_svu5ENrqcfP+F7&wj?E4KQ$*66fnj5hI)o5u1ra`D#<L#NkwzG
zV@?U0NKPimCssL$dFi(56}l;@X^EvdCF*ved0_o13Mr|@ndx~7x(WrUMFlyj6`3WK
zHu@l!=42un1r|XyT_1}Zz{-%kOlD{}mXs7_Cc#4_uUIb;EK&*zM!n?xT>ZopP?#8?
zgb75Km19aVlEXnh2~I6U6UoU0M@B(nQE_TWWkIT~dQfU{erZv1DsobQ8t7b<npl#W
zVrOJvWTI<eq-$hkqmL#9k4+n>glk2KXNp~_nVE%|xsj1>l7)e#u1S(*s;)(nNs6wq
ziJ^INTAGDXvato+EQl5iliZR#Q|wHQOpVM;Ee%Z!%`HtW4B&dek{DV-@=KF5K;E)L
zxD+9VP!5hQ%xFdpE|^zQgB7fm@`x@#%3h#Y2IpH){1&97=^N@9qr`7PN}82pX-P(Y
zkzGh;u7Y!7QDSaVVu_7Dk_4LPD0g8+ZUIU<guAdJu|VHY4>bi=<Q7<gQ%inPNPd2f
zopWMdS)z?Tx-dRRLW>~<G&MmkZb(U3A6}x_`2~v2XJD{M@^o<wshIQj?(=GyuYb*d
zeLwSNRb=e8!1rPCC$-j1xRWX1v{iGR;&=U@cdysZZ-35j*?+G2Gq;N3eg~#wW}K>=
zESH(|7VNsT$~=10s<m0G-@Q9Chkdj1$pv>KBQIV1{(9FZj@UGY-*adF-1q#mU`S}F
zql-&k=f8anN{bwhFHli3IzTTX;nR24fQsK$2^{fXc$02E{Btkf*juTtNI~A-YQpn&
zZRM!r3+}LXt$OtCkW!MevOuz#vj9tde9(bcJB?fSZ!1l#t9)8ruX`)UDXT`N`O<+&
zcN?p<wQq=vb9e5)UdAfFx4bbp?1G(LXLj>piJ80ELqq@8dWtN1w|>nbE33ZqRqPK7
zPW{_AXTs41cUU*AN+`(Ud%vyhQT*LniQVP$JzlpmxmTBM>FIeKo2|a@#cs~sXBOPi
zt@(1#wYGd&VRn^R*P=%*p_1R;bvB-GSH7hd8oKdK!J^a@8Sz=UKdKTqN^eU3+qY)I
zu?2TrwL*A&&Of-j&F{pjuIYT!lv@)$Z!c(1x%}2}&&`CRR}Us?ta|kRG*@NKmwTz3
z7Cn0QMd#nOYpveb_<Z#B93NVHS8=>9pOjqjceYy0{z~c7eC_-7wRJx9U1FEn)D(Sr
z^N;Lo^#x4JS8cWSp4T>cdkBNJpL&$r@sF>zx|#I%Bwjo>;fpBS(tZmO*29~NE6!hI
zb!3RMiRi24@&0sa)7{2r6AkzL^EHlgJ0>~F?%0AaitTG2Jo}<^Z*%Ekm9C0aYdBtV
z7V>z1dhu_S?6rx7du}GU{=J|f_O`4(KIlebz^@s>fd&QTQq!ZY6}Xt@X=u8nB{9vI
zx@Bgy_Ua5d$-_sVmDSi<v`qEgm$2yW@daO)BSSA%r%O#=Bg@bbKSfkgSCQ|G*R71i
ztNX+z&HuvV5LU74t-j6IZ;R(y2k*FXsgK`!S7CYL#N`iJ9IWrgZL+>#WYktZ>vmzQ
z;>+?qi@$7cd@1>7>sH~eL-SPEhBwS|KlUNbS$iEH-{F%JzF4y@oxk?0PPFyHRa$XY
z5siB{@|-T%^09li{hbWA#~(DRCowiTN-}^#(YW|WcC>m3!}3+y8>U}fxM9(%16|rG
z@q6!eJiDE2bC*wn=M=-juR6~^?prIpNa4ZJRYh;QQzP2CQ`?!(Za@Cv*Y>j+i5_e+
zY^e;9uOqjWIu?D^F}|^xr!w32<gu`CD$F9yMh7fJS($^4E3V&S3}l$6VcD?eNJU#}
z$FBXGx}Hre+;jO--)iRVW^=#z{X4#Z$6?c|Mc?0<=tWyIa#XB(!*J=*<47-lL+;h9
zGxpi`bzdusdHL@7%QA-K{!9EWjt*HdI**i;ZRX{DzccgD(`$Tx7H<|0?tT;5#uXa6
z@kzmg$`qM=^X(6Xn<w;`&Hm^Sqobs9vOU~W+EC-Kg43iAKCXsxr_Ud}cf(dre)aG7
zevdb8O|EsBeP{zuTDI-U+fn|XRyQg(PcT_%Wh|;6K3!SSs%xvEePY0`Mhz|x|Bl1!
z`Q(oTr?O8F5`O#bvuk9a;fq(1aXFdCMV?2p>FTP;X4{_3=XvJo<{|WIlktbs&g>r}
z>iCp=_ALG)&bCyZgTZG}$DJoCb5?GAkdi9-Xz`*uZA*)t&wtvqRrYeXcT9v>%C%EZ
z77JH=tzwpHbKm&jVM(+|Lvm%LMR;_p{FC%#|CX7X8UA#%Th^saa(SYv{xBs~(<ShL
z_1U*G;^Xd>2+j-+ET}G%tKVGe9Pv8x-9twI2@z!$7fp=X?CUP~JiGl^PTOIU!UJ~J
zoffgdjf;2A?C9<J@^I><G>Iv7Vu_OiHkeDQ>!=2Zh`zsRWw>|8o|lie+G+mXyX_sX
zt@pe(_FZewOztu9w3s!!^|Gm`#iYrdM_y>i$7$|7ynu&6P>u6v_2iG24jNgcNXnOf
z-8oln$yc4_Tj!}Z%%3jzz~(f=wri7fwn?6w++#L7VEWWXV^cAi(`TKSoQ)Db$fh=Q
zOi+2UROMk!tZH4@ocD!W-$tF^v{hKX{A=Qyd$u2@rarj%%(L%tkT<*COGyKslg*Nu
z7SYk2$3GQIOk!+vlyo>VspZZSl|RofM*aEWAooquM-WtcdSB!Fvv)6hFn71j15Sb0
zpH;T*y*<C=d*vK;$y~FAN^b@Ky!znyXM+KIwUTm8#nvahc2(>Wo+1-wO7#E#{6cYt
zDra~Vn+@Mr**TY<MV{*Fn!Z|lf{-w8;FIDV`*Y@~|8#2>z0TJD(BO^dvd7`=t3E{3
z@g<$Hxy$FmbBaNkgUM%6$C@Q9AUkHLa>|#LBv&P7UcOuqS^8DywiP2|#mZH4KI{B-
z`Tyvcl~JSpoeZbLA7&h%m$F3VVL%z%xhr9h*jaaWn9cs-Ti@_#aXG`K!$#(og3GhC
z3JwKz?Kt7_-PZf))y+9?kKDgyWw?3And1fWm*1<bRTi%J8pSNtX4~&2%*uHD^X9^J
z+qn9lrYox*Ix~SMVb5Zogfvg5x+0;t6<W<IT@}l3w*7o6y508l!<&|SZr|c`4ZXSf
zi{anRN3QSBbo<;PZhP`j%$#-G9%SUHJ-RWOr-J7cgLvYkfC-Df>xk~Tey#PT>UZ1I
z@6T^eIbXD0cg?LtS=r;#ykS@PtFKSC365<%yi?NVE}sz3DTNL9HtV!IhkXbP*MFy%
z^Y85T;J^#^rp>3X@o`+Jnilyc^4+Z8>-OG$Z?gUIs{;bl=CawI%vq~mImPzmf<xs~
z9G%`r&Rf0u$oVdAn{PUzp|0~+r~h2PuI`&o`N?DRo=ly`czD8^a~_9xN_|sl7Hl@^
zm=T+}^1Dv2-}dVFJ`Xo-mHl7vRmZY4BYn;Myw|p#iz_FoMO>0QsNK&0i2aQxo2p$E
zyQJrjyz`sn%f;^a3ro$Ib#V9ExdmEUao4`LZBGAr!R(vL|B(Al+2;Au-&C@VE`Q{z
zp5)kDKE?6bZrjt#Pj%Ux-+br7<~=D-ZBD<vv7`L=_v^DioLwupzG~9`HLDNta}`B0
zawYMcQrNOFy>i#FCULFP0`gB?&u?1G{eEld#`K>8A)!aRE_2?#!BSVgSI~ER?=w#>
zi)quB*6xm-aCibwQOy^=q+iyjg+qVYcuPO)shXBMSHOi)UCS#bTcf7#reo&Omo23s
zzaK5DOyS`!`(8e2lEQ=bR?BtsQpA#e`Wd}2)M$=rS@Kn9{`M&jVIh02U+Y}C^z3_y
zt@EDp{JVCG(^KF>%1g#2>PLP)T-AKK=`P>x7cBQyY=3lnp`gy1s&}4pr_UaGnZ?cb
zJmph~wTpp<bHlyOH5X0Kf0`d#o9Dk%cGDv(-uDd(4E^HbO2&rw_`_vo#KOW8WoI6+
z);0T6op9o@wdXe#XYu=iZ5<q*PWx7;|J)(|S!cJ}`$wC$GKW|FGOT;QuYbLnh0eNL
ziL!GKsAgOLyLvTWV$#)_QU0F-zo|I$H5;|qT-dB*^e(c@@6OSs)A;^u-Ygy*6S2)L
zG<1!HOor_21FdS(cCTKUuV9$Db>0)jbCY>OYrgOqX4{^A+%BN#_2bBb^qbY;)jZ)L
zqUyS-9`}T0uPNX9a`|o8b?cMs+1npXNZoF~{;SI6EnES0e2Z>Q=JDh?C9rv8`p;7j
zn+;~F)MUKA7Ir7^)8|c#k`l7mrbk&VSasm!si3t}<k}KfepktTJo(4nUbQRyy$L4|
zzx;N~=Hx7|KN}`|R&kb;-|KvV+4l4+t~~*vZ9g);+j_UJd2sEFj^5TSkN0lVJG0Bu
zls{FvdeZ*AQT<iTmvc{Eooo{x-FckL!r$o1$3CN$m+y7_!=^cju*7^YIDbh|TVmGS
z!%wgIIS3RS3Tmp|SY1<h_TM7=y#N2)glFaTOSV1X=PoYXwTsEUc(aAS5s!g}^PDoN
zc|Ff{mqwkxbnWT42gek(cf7oHP}tA*^wCi3sN-rM+?va6PcGgWX%V~j?cHZD%PJBA
z+8EcS&ukIrpLTHNi6809fA4+Bm%2Bdlfi#VOWFCN#H)SNKy`Z3;qbrnswV}e31=B!
zzB!r4xyD^m?aREUH*b0U*<vBDS)FueUvO}Y?dkGp>%yhG<sW}ucyn{imh8zV^&@Y8
z^xR(Pze&qhba}Schm1HgXQMM8`iy#JsB%`G^!)L4V%Ckhel<7ixi8DHrIh&HIj9=i
z$|ZU}(rc@xt71XI!c|&fbN#mT`aN=*%sc<fm&1zHa#O>m?UH}~dew)?i)`L>CiolO
zG0-^Nqr!E<)9c88%YFOI@7&djad;gWx2ep5Z{obAVQS@zN^_Y0$KG#R-RvyqY;-Ec
zDb!8t-1V@B?(ClglFfd9$kGfj&^Uais#JsX|K|zY%>TT9Ya6ASF?s&=>S@!Xtr$ct
z1drIB-a7Be4VJjw_d41h2KL*Y<nVOL4YTsQ9`fM0Ym`UZAz9O0+53TQijD#alU$y>
z_dPZ<i`y@3n$oqy_X<F*(K9-FyQ-!cJKorQ=f>ne>$7<?B;>wU?9%08;(MO*AZIPB
zz19`E?UIuoc9%~{Jfq{OabtGR<j_=s*y_XY*RR?2KJwg|%;gW63JwKHh1i}JZ5O__
zk@L^Ls>R{9Cm&bdv<OZ<`0-E95!a3ClJ6v@B<dYfSy-@sn|Vx_$e#Sy$Ie{3v@BX$
zwriXDpY>~FrK_ej#>+;%_Pmx|y}h?;Qu`+3f@NW=8B{-&%s#L{g<+zaFxU5GYu@ty
z-?939t!C}o^qZUx2ZU8wPx$zJ`0HAonOG{SyRLFlJU>t3$pz|>lV|o_HWiaOe>%<M
zk`$XNW5oW=HsRr%%atBqp69qdap%3wSG-;}{$f49=x*P%iJUQ6na9=dGPeacXk892
zT(*qq|6+yd{kOj`u&J&!^cLa@p7e3{;;jXzxc0r-|FqCA_4$$56<O9Hp+`SYyVS6f
z?<Ko=?&8T6%a68c$F)4E@;8#NR*GO;#BQkZk}>MM5##^g6?db~7iA@0bUt5HRXZ;=
zRddqc7}-nO4!P1*lgjuNSAJJHe>Lo(Iw<y^$#ed)yVm@IQM!8CdKtTp<)`=*PF}uO
zP&;qwR_6NL*N3*qe{Wmg{zN5i_q~?a=Iwi5dCpsXb>EJMD@=;o9X;<a{;tCvG^wC!
zZMx*ced#N0EY4q&w>`b}_}h)1f4)W8O0~Ifcv0$aG~L$oQ^$G9C(|M?ZL{7N5X$B$
zP`L2u?|Dx@i!Qw(|9tnnrTaj2%HA33np_ssX18uOooX>}{?TS>qazDc`jvd6H2?lQ
zC1fGW`hI(PVp`$n^RlvCO$T2_{<(UKGf?1TN~WSw&4tY|1z%O}ui3t+?ncGDCkx;3
z%vrng$i+Q6DsJxYe|-)7kQ^{+VXv+C@<k68pR@DP&8?Gq7cyllvmE1%uk)TBoA)G~
zt$O>{%1O&(<L_)x_q>-BFezNgM~LgY>f@MPRhdma=c~5Pdz!{>-nDAcZFbw!?Jqs&
zrTI^iV``qXXIZ`Uzxl_r9)^6G_w>=S`PNKP=Ywvxf=z8rlV0~lqCKVZRo$HRtB?41
za|s-tr;;n!JZYPY5NnFM>Vx|AXE(n&KBc{K8mL!#^6F-rz{svCGbB159c~A?OMZXu
zLDS~Fw_n?y^!oF}!EVvb$xGS~+&A5#DjoiCU(pK%B^6HjZROch7x>o2g@y9?oqaHA
ztE|?x?dCPf9{Z}+3ifVyx2@YPtLVAntl#mKS>4wrKe6$w@^}+?Y1-QJn|%(ge8n|q
zjgIpi$Mc__eYsP0{?g%JoaJ9mKFLiM{_%Dzo5?-7uj|w!gFEFnt9#x)V(nFxA(;JH
z$6K+B>0aixhx2C7TN-@x#%7TZHLfS8RZWYY1&YLZvwMxV-V#xeyK|w;P$M&1>C3#O
z+4G)Oo8?zclNDfP^q(eD_4RwimfIJtcG>w~x;%HuwaI6W%#7mhE1akDR)9@4clN!_
zH5<KVr~jPTUOBDm{HNIP^F=A|<{gi(F1*_J?Dpfhvy*$u&Q7*DJayBS!xB@rD1*vm
zEABU-h+;E%`fT?5_vbd3SeV;y4HFTaD_mY!ohKK*i`~YLvCAxb`R(P|IzJSvCxNvl
zZ>gN7`Qz+GDT@@z|8hH8UmiVs-HRdW{3YS`MGt<RO=K|OIr-_zUyJzFXD6rlPb!mY
zo>VqPh3kZ`-;w`cUnsiF@@cd6_7@5LDlwtqTjZmIJtlM3u07J<{prT!C9wyq1m0@w
zO`kb=PD?nS?2+J9SGnhrdF^)opK{i^?gpF3**vLijsk~4{)@<YD|8OZ@!OtmSL9;q
zdy?`f$9HM=do$-rW&HDlGp@g|JuSZKlwzllUe$}pyjGPii+QtKH*Xb_N%o&qqma_P
z>HRyCe|PJ-g};l{{QF*}#kEWAT|!!6<>meRUQPK}7%sm{eYrp`-{0o;(B;D0)1N;K
zDa&}Zb5&ASA?N&WJd0j_`*H5iEXKU7Le64u?b+e$Qds{6U;a^Vx4VmD^85cD?>h|^
zo0Q7e)hO^!to!l%chiDjRcZVCYi^bc|9>BTf5GVoDW&EsR&hm!Dr$G#-mN}a!%3)R
zLuE`zh^U|I&5gBIf|Eo9q~&W}14AuRZoYl59w?9?=DPn+o!3p_W_54=2mk+FG(6(w
z_T$g{_uLC5|6gb`ypbxY?YDfterm9vt7Af3WPyPdFTZ)$s-&y_|NX+|`dNrbzS<VQ
zibYTEnSh{VLVyr5!Ey7jjIvr_OQeWm*Ps7f@c|;NWv-2DG^{_pcAe=n;m7@R>R-f;
zXEn(6{Bnw5TN*j3K*wD8c+uvK=4Qw4PZ$0q_O@(G|NVo<V{0Q~w&w3I(BshXzS}M@
z_C!g!@cZ|+cI$SX#r0Wi`Q}>_Zwk&!l$_?6Da0ysV|tVNX|9R0=RA3TnllsB-QQ)~
z_iJHb|K3Y$0~mFz=X7*;9ocwCr|qd;bfHDz<7Wo<ww4)abe7#}y4$yGVqwgtZI3U@
zo&T(|-D!UM&+Xgv7!I8G=8~MYWwpeVEssM&kFE-KyZwSi&*bKZ%J&R!KAqb$`_78m
z8X>;OwnZ8hb<($GJ9`W?GTmQlr`|rU{?To(XgGU&xvEO(-}2>!E4MOph0Ix_bNk!9
zf<<QyG%}B8D86*KeM2Q~@3oF)w--lj+8xg#rsyfaazJtSe>NMnLp+`W8#nCwc||cd
z|A&rVv~{A8s!ep@8Efxbg>S+y^aX^9YWu0*Qh)d8YIbkN9@{Qfe)T((vgf7vhV43j
zmDjy^(}x^a!z5+pyXQ8SXmRD8J@d$EGq3!%?+M4wCK)ekKe}Lv?8ZkWi*j>>*2iW?
z+}{1xOvP33_~%W9>(&WvjeIHhdqSk=M~g(wqdRYT9dUQv$?M&|ruFGjF=@LOFAiRL
zbZOiFnH?oHUzX)YZ?V2$X4PjdwPuC9)WN2M_ud}1oM{mr-KlIjv+Z@3vI?jdFCZPt
z6dC#<E?a!rbhG;hA5ReF@j3gb$eZ1Tqhiq#hL@Z+yvyGv2VB~wv+mYK8>_zVvu}T_
zx?l0W4Aj^O4}TDmH|=`$c4c2RktHgRTUQz7=2xWazpJ_-eXls3Yx>$-T6d0jb4SQ7
z%1sWq<Qxqe>#}M*tt-3ZlJSF&!rL=8b54D^(jqkXAU}7pp!m+MUwH-9I9sbH7jC=8
z)UE?+FWiZZDcNnlC;1UyOW0e-zt2@$J-j<^-?Epv$j0M!YZIsyrgATHTczlv)Rn(M
zZD9Mm$L{5B&SpQK-*@eRruL6T`2xY+@4_E$>-S$bcbR=~V9%2o8S~mMJ0}KQn)YJ(
z?QI*af4tA1U07OhBH6=j<`(OdmrX@~RC+$#_C3$Lso~8Mmp{)UcwIaXOnt}rcFOIu
z=fC^hJ8G@_HXtN)HQ&Z`PyR+n&$wv)$@dSgzAkmaz@|%e_wVvaf|FWaoiUlSYTbdy
zM_D`WHSWEA>t2C^y(;JaHLIH<MLvG-lRKlgHs{ypO{<a~yfe`=xy{gE>s_U>hJ9lA
zwW>*v%Rk)C{+b~nR~XI2z9lt$YvfCd;Ov9`U7xn@y=N`rck5mOC;PW$4@!Dj)wNG=
zP(SqdUP1e!2iLY-+x(}Ry?WY~(mjh+)~~Thy>U$K<E^i5CVbLUPPz($T6X)^arHk-
zRj!-<E$=nEjef{fM-i4im+!nY+w*lp&GFLopCy%s-;Te#Ira6T3x^k|ID40|gZc~0
ze@)vuZ^`ZnJtm$jPWl}Gxl2pt^l7K0o^sIz?MuVk{X(WWLE4sYE^WHKL1xA*P@j$C
zP2?pp1C5i5U3bgsDnAAdey(S4v)MPf$7p7OfiZ7-mTrZKy>w^XH<j($r*fPg-V@mq
z5z5!T=)vQ2c0Ri;AAm+4%N$rguvAZb!pf$)b8AZZg47h5=}}f6JhvCJo|rVPEBLDD
zkHSQ`M`bSyZ_o9Z+`{}jxM15cruH=tKHqcn(bY@Z96qsY)uQA-=Wbb_Rlm7eg_+0e
z*SxRup0Mbd-2RYxenGhHN#m_zfA%c-u&{d4ZBa!}ja%Er_ge<>Mo#>gdS7>Y`80+L
zc6IDsTpS7KC)p$iTw(@A(BvPFx9jeB=IVcG(!8$ASFJBkzN6<UGGY3pmj9(&_|yUq
zO_h5*@9EKLWh+*7rQg`BVlFXdOWIm>OT*6lak&nUKim+jp0rGMHG{3r+H_5&`h1}{
z%MjMhDvu6-lRKlk`+elM`*V-o{*_tbYWU4a=cV<H#VQQa&popgZyMOV+NiGiHp#ze
z_m_F85ATV9#%oyhtEc6g-~Q15O)l3;uzK1S#`hYW3^DUn{T^+7&6^=3_jN~D*lW*a
zk85I8<<6eJxpm%?GS?X@BJYa7>bR~=udFXEy*&SJgjf54d8)P@j-FBeMIrA!tMn$<
zuUCJ$nCrsfE|Zx7JIeQ_|D3m0x>&AiTJz;Mk(a(3Y8<|^=q{-BvE#`rPd#gyJm*XC
zwkO|qnPeyaG_XB=yqkN%$$6<ak^|Zd_o`RkTy=JG4CjM+Ps&_p`K+6l`m}A{(`WCM
zu2)ZEVi5J8CX;a9=!~Jp;S0>RC)K3wUdx*BoOF2MnK%8~W}k!keODa9L~3r9UtW-I
z$>|!(z)>;H_Vk8h3wVsPm)}k<|8P7%s^IOj3CAb&7`=aE<5?g5I#O<RYviPYrMvGZ
zM||_~dzjSCyDEELYLO3*N?7|Lzk>hkmh0ZM@c*>^`m2jC&f1*pF`2VU@A%_&6L_X@
zT{wKHYT6;@&pP%w?+?ED^l6#>`mZ|a1q)a5GTqqB6aFT!jWaZK<D-H_wKYQf_eQ;c
z<ym!ackJw`eVez6{aLcY;k6=LrN=84#&z>jjr>BxAEbn8CfR<`ncLMg+qn2mWSil=
zz&6gmn~#LAH(l^?+w*x#G(D;&-QKz}U6WHu&*b)<Z#r}Dzm1$GnphvRrhom6Kdp|E
zY}4NGMYtdT=(hLj2S1}rDhthwWuL#&D%8=h&~o6<-kToDv|LI0+-8|M^Nt>!R>p8Z
zShe*;!g(Vd0}bapk5<W^1!cgb%g;Qe-rTjDF|qBkshG{Q`H#Qedz&DhX|(XbRZx9o
zXY?ZS+r62`c3<;jIPmyr>zl|nMz&Or==th5`~0ifHawbg_g~JIn=IbHl%xDVIehr4
zvfcZA<h;VOoBymSKX7&PpMU4`?nvt~R4jVJaOvh|o=l!o0!vnDg+2XRIcfe^DM-72
z<;Dk{3hzAgrrVwWblrW`!#hdmKh5k?QaN6*J9$M?K--Mo+d^9_Cmoh^E=u<QbaE1N
z2iK>0Ps%(OHT;>9@@M_xtp?tUR;Ta$Y3&W_*k^2XZ0$-vb#pUM=)=Fum5yZCdbY1t
zUwMA=mfe4Uew~-XDyOgM5W?|AWxKZki>}W1y6x#NGxLA&+{?;o=6W!5Q_`Eehv%iP
zNbEOq$=L(yipQOqEW&yvZTaJz+@)szMlGO-xwqNp;7U*rzH(mr%Mg(}vt}N??Q8aD
zvGeOltv8WvBKHE@IBMrTS$$plVv2$PC6%rfr_UYG&t6?%Q~&*29{X<g>S;|NC(G^%
z4m3D2FZD!HKwE?AZrN)WIsdG@*Tts)J@3Df$1~4gw~tj$^K(7=dz;?+s%cCO$8T=t
zk>ok0u>9;~nZwq)vOBC^RKDMKZ?aAB;<J-^rq6p)mYF!SNqqIG7~9ivhvz*_%~lT)
zt=O}KVP+?%-Tt&&kvyjqwxyIWc>2vkFVgzM%!f9f#yXh~Z%@9nj^p~hg0<<IkBt}a
zeJEV6z`?*$FfWxOA%JaR?`M_!dDotKO1-_S=YO}|c<aNvlke!&&U?DPKP3CJ&hPxB
ztb-Z(JGdB`zPGVkvZYSAX~Fv9YyPalzt1*2-+O!AyeBJPaoI#hb!Z6xpPb)*Z}T7j
z>o%wTd7ed<`TISbo3=LnXGx@C(6<fgl083`t97h7KY7lP`q<U(lg+EQn}1XBedC!o
zU4dhQO76U;Kdo}}f3Uy3J0teip7fO~w0<q$GEbGk=Ip%GfTVsSj|^K+$=J!|;5v@C
zzjB_sX5Jk2lk=YV32m@D|LJM7s6}LC+YJx>>S=S|M6xNfrA~+_J@sPFj^^;|r=GZd
zRx$o~+Bx(`qN85*q}?gW=Rf82vTmFxm{(txJ1=!IuY7st<xY)?4;QUDbSz~y^!h&X
z{~A_fZPN5x``!K>ouI0)=&MTc<_N}Hsi0AkV7WWec~>^qEWgRd$W#|Lr~gXDyf$`b
zw$uhl(+ARg`Y$%^&Aaz1`>#*#N%HvBw9otx>-BpDiz}zaP7dCYlH1NDYOkyHpktfD
zfdxDXr}=V?em)h;egAFK@rApeeXX42xTD<qhHK7@^p{gaYi?P0tZY5%dpIaCbW5-A
zy3kfReLa^f8~bve6rNKIOIB&U+0^sxqxFspYk2Q|m6@bAW%=8~GhcCiQ~B#LKmDit
zy2@$s5@$AV>GN|4+cNK|{wY3%PTSKBM;GuI{1REP?&R)@zq8X~WMw`{ObS#J)_J+o
zA~yG8)uco_|4aA3%zIidv$9QtW5*@qhM7yxhCgI-cz#57&)p@V?elbWueAN&WF=Bi
z@k-X=i&5qCBe8$#<F7ydbv99F*UT{HjB*1|%dd2UjJ>|*6PuF^3%=^Og1QQyOQ-&D
zy~)`BzAH7<_5R(=JvS4s`W=px<?;YcSKjw5^SgJnK6b79-wP{_U(WRu379&u3silt
z*&$G^be88N!@}=6-QV6H1$E+A=s5P=e6ub6Wq`<?IrEOrmyZ6pex2PJ9bID`W}f+%
znOd$*-qRhvUW)&&X{Qoj+LA|8!P9tVLW?R3ggcac)kNOk?-pNuN|94ZZ`+pSgJ+La
z?zQ^z-0iR7ABJzo-hF)fbP_wyDTWO><qL9sW~={vyW#n~lon5?&|PAaKy55dwOh-t
zdqoRp8a+I)Ph`%TOI6e4x0EO6Ri=KPw{%THLKd6&tlR)bo4A+*^HLiU0)EZh7xXR3
z+j^gr#HSmeHd4UMe;j_-LmsiSTFUH_R#|i=Dq>$n!0hy&8()9C6%3k9WDaPYFSa>d
zl0#xj-Ii;Uw^Uu9tmA#Reflcx51Sv=)J>B}*>e0t?JXN`*MraZHs`H&=KrF@`b~$K
zvH24Jmw8KabA-fa<^0%Eeqn=p<gXKUpayGjs_Tuz^HKu33_LaJQuO~l-e7kA)BV5O
z4W2%no&QaTnZ5baanSt8kKFEX+moNRxPSO`kjo}Gt|jA}O7kqwE32IM+XXRC*4UIT
zsq$m_x%QCIqnj>wuH#(GbmhUVX)pX*R2CX3i!SHOaxPzdb23lWFPqbg{%TsphqF#r
zdH7z|cE<M&<}vRhnGT%y=AO7oy{&wj-7&eP{cG-ezEXUd>JQ4I_S$*}=6Rf(%p>~R
z_O!^_^h`}xpB8c3(`?%RuHHRAZ>ju;zDw?5scY4L-d_DRLqh6cZFA+MP|1}QPXk2j
zCd@xQFQwxWUq#S|N!j^363-X8aeJ@0HTS9AH=Wux$Cu2f`Pie})jzoH4gLM-Si<|V
zr)~36l6bTo7V2C|ae4@fNUi4Jz>kjA;?M5bv}N0#j@)zi4x6Y}U06(m$v2(aCdZwp
zr^&3Fm$Lc0?MY)(v2QAeJFac^Ip)oN`awua&7G_LH&wrX*4Z6v>n(cVyv6xT>Jn3S
zXWV?)Q0g*U{b%NRhG{)+OP9Voe)_8DkIX{G>Pd-i|L+98kF2{`Equ9q-t)*;@7K)N
z&^x01abw>7!b{%_HOdc0`Cl^i|Fk*o@dTaP6#2NauPT=QMmDZ9RHp2xn${(5d-~c%
zPK_?6iMFR>j|abxRamW+clOM~vsK#r-bjDh5ft{`bKbg*4`zN!`lfPtj;BcA7JJtH
zc2Ujq#hxnPd=UNbT<7kJ-8*^bf8{&$Zc1&*)Q9bkJGtj<YRYeSyI7pg#U1xW#eerZ
zm79}Q90e3QmAbxVf~U!No=5VXJ@fGNzS$d2^PIYS==Sv31x`m-9p-G;sea=bmcHy^
zPOfTQ$c*J*RcfoIimWjErW0=!!RN?uZ);i7&4&fm2R6Sc`1Q-^Fc&zvKGEk7eYo+D
z%r}+H;9rdAz1dGIaxv*`E2;Q%XKES0{NBF@R5~pt&F1v3($aQ#GSP0)+j@~_ihcgF
zb%z&xVGb17U~eW?tyJOce{9j&v^RH!nK!F^l-)MpK%=vRhrUG1)1=H$)@d?)KDv5}
zw{q@%jITfX@W|h*p%$X4$Ln(!y$=daxcc(Q+*Vc{YZ>c*uh(16oe>?`xM-F2zijsI
z4Ln}U7cIyx;*!6yYodVc*3bL*tvO(3*ZEufo9zFso%_7QM0(ENKYVmrnaixQfBjp6
z!wih-`O6~jeT=t`XvqzTazB<REBXEVQi0srYgS3jntS~HI`f~ugPg?f2Tr<gk;=*!
z*|=y&{gUX?Q@cM|cy1Bp@%m&bdMoztg*P&8&z^03RU+TQeA#$IUtjXghYeE4-k&(J
z|Inw4v;Vlae)G#M`6qWTE9>y{z0Dg=zvJH<<#sGl%+;f*Be^oN!)T@gN7H+~85}%b
zpA20C4GQ1e#r<C<8NQ3XAU*No<MaAAWiuq?VzUKgPM^Ltbs5t{bsn!zlFQd5RFuip
z=j9w-ewTSias9#7r%o=q%lP_)#XXU=0gN(d&mNiA(Q$Wwg3|N}yr+Jw@Q}GNoyqgV
z-|Pt2P{X}DT3#+yy)pM;^vWA26i<Vu(w0u+tGRQv-BaH=X#0a*n@u{ETpS%@%d8$L
zDVKljYHnvxFf(Gh>Df8K<^KOV|BgN<H>W-E|JMBUI>DBjmc%qqSO3_bQ>)JZzgM=r
z=e2oz;ooNl%jS67{r}A_)vl9VktX;4<}T2xnsRY5`Bi)NWRyR+^}xU-T;fW~vbSFR
zW&)h+{^z<0xu&QbdmIzHG(OJTWtK{c%8|o5h8oR&Q8hZ39wq<%^gXW3qM*K1G&gzj
zj3}P>3rbEHwWU4#(BBsqld&zaER$32mP6aLxqdbGtT`N785WquywkB&S*pV2(c694
z`03G(ey-+v@j8J`j-LOP@1I<A|61#IsoWW{kq7-uYUXq`ov+S!YG4pO@Nf>#zRgQw
zLE}!#oviQfvo-HOeU0zW+O^!9PoKVVQfx_Z;D`7Bq6@URQd|!%{&{l^-^F!_0hjtN
zRZn{S`TNJ|$HHo^+-%>?-EA#3X_x47Ve`DZZEG|HSQwAr+`J_A(1InhOEtKTy}x_>
zeZI*<uk#0{`ENHDVqsjQ{UybG@{DU$(>5`A3KW=_a|fpV+qP`((ffg0lIN^D{?^`n
z%cIM3kLRTvyqChk;P(7|B6vBSdAHK5O~xD45A~e?owtkkwCir>YZnXutlY=y=X!Lb
z%{QIQbVH3ycZ(F(?di`8PTuP4xBhMLP*KIz(IKx$$uDGDQq4cRfA=nK&J6Hc#Nhp_
z?n+v!Ls)2xw9K(ppMTr4@2HyQyjZ|hu!%wD+&Kps|Ax$V1C7kvGMjopOD3*g>-~Of
zSK)DI{~#e(&}4;6s^&aR{X<jdIrhI7dAxX0fr`5DA_a#qkt_VYyXLBGc~S7-*8>9;
z#|DlCYTC9rRnxZYmYA|d_m{Z&|BUxXZ~NxXh>a_|J=vsF$w>I|#~4?`xHVeNQ8jaJ
z3w?F_7&sABngw2X7qeFRaYT{uG;KfieJ`Td^Ic6%-w^F_``Cgd?aRKa*l#F(y!M#c
z8J*qlJ!iQHN^*XzSht8}dA3f4ie8!b^2^m{(qzt^YhhqqAD8cP`$fy`4JBG!cJHMd
zxTfB3yWE!$aB14w?<&s=Zr<wKzqK?u?PRTg(O<tH?vsBvR$uemyjPWjDM{(fiqn4V
zEmnKV_HwRW{5WFMZf<YUczzzoz|a+|PR&!z<!+v|?a9G;DYFy(gG5+BL36V!Y0kQB
z3H5j8waeX6;9&aZ;<7+boB5UDgZ=w+4{mo>&f~8Lo8xz9N2RpiH=WGc5>vLQ{<1lF
z|9-}i<+jao{@WQ9)GY<ihfjNS<zUg9{_eY<<~}LUOw2s?a1PIkRa}!*9^L=D{myqC
zSw53pRnwR(&QFr#Yo4^{*%b3{D(t+cpRiU>E?l%p<oq=8{GW4|sArc?yBU@1A;hx2
zYFhr4=!Gk^`ktnKoU5A4**s~_qVR?<m6Ps$Q<<-)?Nk<YXr0ZwWA3hx3d-0Ra@k&l
z^4p$vy)JGuNySNk<(p2Xx5ShbkJ;*$?cZ}^EABtrsh%n1nzHt*O83Og-ij-|JFZnt
zo1}1HrACH?RB5%;zAYQmH91zC^l^7GIXtPyP~)WXUZyYK&&BPzoAP;+dL*d#Hc@PU
zasTakN8DW>dHAWZI5q@_U3ipwUT9U9)xF3`;u2GyO#AotTf3}n#p;b$cc{;l=zp%G
zY7rdV_}BV}-z8~7K~M}gFjQRK{O8Qo%^?gjXHOoP=do|I%6Ew=Uu>)Vi%Q<f-77si
zc}`qR+3xV@{gZ3%1vOp1D)y$jW8Rap+&7V{oMh|j_i~2u+j>_zgY13$CUO!xk5^TL
z+$7=e;vc6kGyAi7^Ysr8++KT5dr;!b`t7(~4QL4J@(1Tg$peO0uWptxeHVEu>ineY
z!zX3gRBassMeamK9Ffm6c{KSQqtBh)?>90nnV+|RkF)K`w^8pS)0|inCaL_{{a!7_
z{6CL3AG=h3|DlB^8W?0wr=Fj*uiw!#JH_?ji$Au1c5W=yRk_Q`v*4?Wf0WlppQGo#
zscg@FEN-*u(W}UBe7pXpM{cpcU}e>4zHh5jXow6*d$Xe_!!`aqz3n9r=FhE_a}Ij%
zId6^5(cs-TCf`~2b>7lfrROiHbWTtaERB@xc*@AX(?ElhFY?%1%UrGt)w!{9_dRFH
z%}|e&*`T;G`h8^Ax_M7IrkP0!@$9IYX5KES=qaH1AR$f4eTyIiTk4I=_TGQo-St7W
z-j_$4j!!t_({p^Y=+TOmt7KN%z4FWpuL%+9dy?k#A!**zO@}-EtJ$vo+qr7<r9;X2
z7L`X9@Fe@W9(?<=vL@m6!D!#LGmKU+to^Q{YxmwWFT5vvo~ptF)8=_godZ93mfMGf
z&Q7n?u$DEfNte7PF(vUu<Ghsady`{6MW~&>;(W>9_T<}3)zjKHU1q%U@VmKGqU^^Q
zum79kt#!i}KJxGrTjUcI@OM7*TN$?085f<4KK*0*cj-!7|Fx<~v;XhW;5gysb)Z=E
zXts0t->Z}F=>0YMQ&7gXfT?~}{y}rOXOVi=b9R_nuq@YcZfH1cDfuR{O%rU+r*iXI
z1xLH*rA&K0cgeneiZ|Y=?DyWk*=EjcM$ua~r}b^W=!n1NtFboW+P!ASHtV#a{0fOj
z^&btCcFS%5f9J+7u}Sl)CRNS+Q|YZ4)cj`d67~CY(>0~O>Ac<0*Ynu_{x*leP?>Y*
z9$23?IJ|&I*e~+ni9a@aTT31+)_Xi}iN5>NpQppBrg4{9ziM|&5OU=Z2$(&!En;3<
zJ1<+RM8OHJyz}QCH<vx0x8#Y{4$DN%gT>Q?o~Ws|rFgvBcYkw<{rRGU?+#pVcFumE
zvHIp_9%-Ia0?$iYpV(Vf#n_%y?YBMorNsi&y#Xx<l#>^~q`2~O^)vy7-N9|8B{HY;
zyk6da8+mO*x}=!I6vZ<uH6&(Uey4IjE9dZ)OOtNTTcXnC(S6<NQ%tUE&|wvo(yH@8
zfgyX!_6ojzJUQ=hQSy1Ag!4u+1{%&2;uxi7?{4@0W#idA;fzPe;mc2tT7-v73N61K
zz0_aU>d`E#FYVI48Pc;0q^G^M^=8!ykvV(PMP;?KVz;AY(wDur%irlgI^Uc7W5w$0
zkGHu53QU+P(ZBpuSHZq@Lcbp;HE&bj8QY`)YS|nw+RU(`V(~uFug=OFPD6$^+aFF5
z?PzLnzWFAyO^YqHBQQqs=WglVxNj=LALeAg$jge6aWUNUf5PkXX~&B;Gm1`dNvsw6
zrS@gs(!-+5+BX{u9Ic$D*zPFlB)3idXN)W7r9+^2{5ALK!#O@0tGGM`KBoTn<xk9X
z?_Qn0a+MZeYPynIQ=+7o#1zFtn%V5;)7RYlrt<iD)uf##7sfJHMQ~ocmAQi}H~+`i
z*@yGw4~xI)+d5BGVS$>qY~h@ANh66ViVm+m&poUS_b>YNuj%%LDK3JM>lnq)hfXn2
z;Z<>Sf1Hsg=Hl40Mk_-`uC)5V;d!Yqk^<N)bKifH`FHbTH@B~uPvynQHLLb;EawYT
zux;$Web1w(@6g9RYqnNSTl*%mO_nWHU~=qhX}gT~$8P)P%02h=TGX+nCBh~)uI2OA
zP)Fku^MDL?n@9nMKkwXxbL3Wmde)&20{E(@9qe<IY`ON#^W6QzsoCuRu3dXs`2O3m
zc~5@s^0E;UulcO<_w8=m)7SG&7G9k_Z|Ne2fI<uN$yXOD99zI+#Q#M_`pdj0ixT}m
zz5Jv!XZ6}c%7q85{V&}&dmU-U)0=QL`244526I3wcLYEyk=arwT>PH*|6P^xx-S(s
zCvTbZprn*F=yB5Fed;@7@4t<l<-!;*`AsKwX(^+{l(`4@K6LDGlytfFEzf+*y4EFu
z{-07(HRo;KpuThRzT2BcSOw$1+%Nh4D*56&htnr4{>}cZ(=66(<Y6Jos=WF0j*Pbl
z+~eNdJ?G^ES_o$o8`m>OKz`~o{a+ns2X>_I+_5lS)8Rp8#e~EFHpX_H$KT%A+}mFI
z_^<94mHDauMb}l2y|DFW+_Y*zVugry9$$snTfP`snI;u3hLDi-Wt-C_RV1b`o?EFQ
zG4JZPy!Z^Wcb;h~8SLiE4`s|-vR~E3{b59$nL@_iu*H?r=6~hc^>6nF?s*9Z7w{x(
zTEu(z?}ok7`KGsQ&Vkxemts;secl>c(PHa4{}O)%3&*;7PgSPbzTw?}>F)Yai363>
z4mLPSHhh`)<is7@n5>rvew#h6oOCPYQ_`IEs}I$EQ>o2MJ}<=Z?}M*!l)LR$olww}
zzMpI3s#O+;=A}9$2e8ffI`0Y7ze_i|>dU@Ko|_CR@h_*oPA&ST^4Hm(*M8P)R?!2X
zm9h6f8JxfL@<?g;`5+NinNz16rp!!~?2wqk=vLUeWLx=edExJO<nJ&<`G2bP=JX4X
zo**S(H~A#<*{btF+Dm7%2*&f?`E+CR8hNRU&m!g0&l?38YA|=)n_RPbVJNpJ|MJ@m
z6DH5;m^?ML;$Bdb$-c=v64|z=-}q=~DReRId+~W)`pL>^jh&8?3fHP9y<Jy1>76sE
zai*sFpmmjzO>CIRrt0m@u}&o-eJS}bBKfMWZ)T~U#@OyC$sqT_^VTlI!t;NvJtt?s
z|Mq!WkWF-;%qNiYBp;E!XDL~>-mSK$6OJzUvia-06#jd8+a%xJy;e2ps6}Y(!Q*cg
zCJM;5Chv?42;G}*`EZ-nlCzs-VydsN`Klv1?ccElUz8&Qx9+{YEpN}=J8dhEs_v*(
zUHM&QzMg*3+&p8I=9BX{jQgIaI<kD&op1A9XYRVnX}|s_UQ12?@xd$8n8(XLFoZSv
z^Y@RH?-y(<uP(fq?6L0r<TZ!>MSN3fJ{xu3$Xr;U;rjl}Zx!h)zv|>WhJ~zP_~znr
z;O|Dcw`Och<uz7yy*!$_=jxTVU(w2P?n~ExSLrVJH$iOD8&9_7>{ZjWKBP65*0S7I
z3p4wo!=IXd;NhJ=&ojOoYWzL$Zpz2TwcOlg*6-GW7RsurKB)B8^a~AtFwx?hO7q>Q
z^G5pIU8{J%?6rDPIWP6^1&&|ue3I)@k3N1Ac*~Z@%RV4Pbn)l!5298+3d<`}id&&`
zc=H?2Hj%&QK6&JcF&xn8{ITf#<$DF})85RsUo<)2K;y5=#|aaD{bKv3;vAh+GDU1!
z6T{5~=_h}C{7(vClh~_XdFbloCl~Hsm?p9IyUteq%{Fg&)&D03uubT-Jvsg6WFG0L
z^G4DQVZ59AcG&Z(|4#^D<CvTsB2_)<;mgL#X<g6fss5B}He$$`<FhWjp<<V8oZO2@
zrXPpR&u-}J+x_>>#F}o$m+9HICyR3DsZ>vT=q<o<#4Q7~YRn-^;DUk8t8WeMHSLZs
z)2sYHnN&||`D%UI{qtr;E~Yb+UKHkA{A6ps)L%47=tEMf<h;#O6sEG9KT+afdZeUW
zsG=^sX|?;e_y3~TaQqZ(2JiW(a+2{2oAz+!C#96l>YlSbI{Koj(>XbI6c$UYuHpI4
z&9=1Pa~-&pe-P<ySTuRE#H1%-MLs5d&r=Voa0SFvAHKQyi!Iwy{+G9f=4)vmdMU+y
zN%+wZzezkVs}F~AY_Lwv-Mx5n;!AIvxUYhqA``{ryFtrrrnj$>m;~y4oQ{`W=T>>E
z&Shpr|6}hz+ut((n)^iGi~m7*JD<|j<+r!E{>uE3{PUK$P5r8T$JaMls;Ai<T5u<I
zk%EG<BHw<E%T7i0F*+{aR9-5+2yU0tKEbx&k$!J(hJ@VL@2Rb<Y)|!s7*bS@JdX6{
zJa;W(g4Frrxwqu@PWcz%$j89ANMpgo#GS7trtH=*Hk7*>nQ4)z=_=(ssjTDPW}9i#
zSw*!LolUcdkiHelb1GJW^VW(j;;T*tOx!1Ld9sto^HyQ!#X!)04Gzyk7tY9>5sH8L
zMvU#L-UJuMr<uPe3W#>zIN{Nz@^;pf85uGsPabKk)$Ev{QXlVkA}Qb<lc&hUsWTqD
zKfS;}P+YWghsL{q_k8z+t1F@JoKsk#rQS5@{~7)D33-*Nmk(}fVo-5&S2nad#UJjr
qrO(mRC8eSNo{I~c#1wiG|LVWLUG^%f<6ApuOSPw~pUXO@geCw8c3j*5

literal 0
HcmV?d00001

diff --git a/frontend/src/pages/GeneralSettings/LLMPreference/index.jsx b/frontend/src/pages/GeneralSettings/LLMPreference/index.jsx
index 45ad5fd70..8c51e559e 100644
--- a/frontend/src/pages/GeneralSettings/LLMPreference/index.jsx
+++ b/frontend/src/pages/GeneralSettings/LLMPreference/index.jsx
@@ -14,6 +14,7 @@ import LocalAiLogo from "@/media/llmprovider/localai.png";
 import TogetherAILogo from "@/media/llmprovider/togetherai.png";
 import MistralLogo from "@/media/llmprovider/mistral.jpeg";
 import HuggingFaceLogo from "@/media/llmprovider/huggingface.png";
+import PerplexityLogo from "@/media/llmprovider/perplexity.png";
 import PreLoader from "@/components/Preloader";
 import OpenAiOptions from "@/components/LLMSelection/OpenAiOptions";
 import AzureAiOptions from "@/components/LLMSelection/AzureAiOptions";
@@ -26,8 +27,10 @@ import OllamaLLMOptions from "@/components/LLMSelection/OllamaLLMOptions";
 import TogetherAiOptions from "@/components/LLMSelection/TogetherAiOptions";
 import MistralOptions from "@/components/LLMSelection/MistralOptions";
 import HuggingFaceOptions from "@/components/LLMSelection/HuggingFaceOptions";
+
 import LLMItem from "@/components/LLMSelection/LLMItem";
 import { MagnifyingGlass } from "@phosphor-icons/react";
+import PerplexityOptions from "@/components/LLMSelection/PerplexityOptions";
 
 export default function GeneralLLMPreference() {
   const [saving, setSaving] = useState(false);
@@ -153,6 +156,14 @@ export default function GeneralLLMPreference() {
       options: <MistralOptions settings={settings} />,
       description: "Run open source models from Mistral AI.",
     },
+    {
+      name: "Perplexity AI",
+      value: "perplexity",
+      logo: PerplexityLogo,
+      options: <PerplexityOptions settings={settings} />,
+      description:
+        "Run powerful and internet-connected models hosted by Perplexity AI.",
+    },
     {
       name: "Native",
       value: "native",
diff --git a/frontend/src/pages/OnboardingFlow/Steps/DataHandling/index.jsx b/frontend/src/pages/OnboardingFlow/Steps/DataHandling/index.jsx
index c86a62a43..f9c4c4169 100644
--- a/frontend/src/pages/OnboardingFlow/Steps/DataHandling/index.jsx
+++ b/frontend/src/pages/OnboardingFlow/Steps/DataHandling/index.jsx
@@ -11,6 +11,7 @@ import LMStudioLogo from "@/media/llmprovider/lmstudio.png";
 import LocalAiLogo from "@/media/llmprovider/localai.png";
 import MistralLogo from "@/media/llmprovider/mistral.jpeg";
 import HuggingFaceLogo from "@/media/llmprovider/huggingface.png";
+import PerplexityLogo from "@/media/llmprovider/perplexity.png";
 import ZillizLogo from "@/media/vectordbs/zilliz.png";
 import AstraDBLogo from "@/media/vectordbs/astraDB.png";
 import ChromaLogo from "@/media/vectordbs/chroma.png";
@@ -109,6 +110,14 @@ const LLM_SELECTION_PRIVACY = {
     ],
     logo: HuggingFaceLogo,
   },
+  perplexity: {
+    name: "Perplexity AI",
+    description: [
+      "Your chats will not be used for training",
+      "Your prompts and document text used in response creation are visible to Perplexity AI",
+    ],
+    logo: PerplexityLogo,
+  },
 };
 
 const VECTOR_DB_PRIVACY = {
diff --git a/frontend/src/pages/OnboardingFlow/Steps/LLMPreference/index.jsx b/frontend/src/pages/OnboardingFlow/Steps/LLMPreference/index.jsx
index 6970dfa1f..296a28d9e 100644
--- a/frontend/src/pages/OnboardingFlow/Steps/LLMPreference/index.jsx
+++ b/frontend/src/pages/OnboardingFlow/Steps/LLMPreference/index.jsx
@@ -11,6 +11,7 @@ import TogetherAILogo from "@/media/llmprovider/togetherai.png";
 import AnythingLLMIcon from "@/media/logo/anything-llm-icon.png";
 import MistralLogo from "@/media/llmprovider/mistral.jpeg";
 import HuggingFaceLogo from "@/media/llmprovider/huggingface.png";
+import PerplexityLogo from "@/media/llmprovider/perplexity.png";
 import OpenAiOptions from "@/components/LLMSelection/OpenAiOptions";
 import AzureAiOptions from "@/components/LLMSelection/AzureAiOptions";
 import AnthropicAiOptions from "@/components/LLMSelection/AnthropicAiOptions";
@@ -21,12 +22,13 @@ import GeminiLLMOptions from "@/components/LLMSelection/GeminiLLMOptions";
 import OllamaLLMOptions from "@/components/LLMSelection/OllamaLLMOptions";
 import MistralOptions from "@/components/LLMSelection/MistralOptions";
 import HuggingFaceOptions from "@/components/LLMSelection/HuggingFaceOptions";
+import TogetherAiOptions from "@/components/LLMSelection/TogetherAiOptions";
+import PerplexityOptions from "@/components/LLMSelection/PerplexityOptions";
 import LLMItem from "@/components/LLMSelection/LLMItem";
 import System from "@/models/system";
 import paths from "@/utils/paths";
 import showToast from "@/utils/toast";
 import { useNavigate } from "react-router-dom";
-import TogetherAiOptions from "@/components/LLMSelection/TogetherAiOptions";
 
 const TITLE = "LLM Preference";
 const DESCRIPTION =
@@ -128,6 +130,14 @@ export default function LLMPreference({
       options: <MistralOptions settings={settings} />,
       description: "Run open source models from Mistral AI.",
     },
+    {
+      name: "Perplexity AI",
+      value: "perplexity",
+      logo: PerplexityLogo,
+      options: <PerplexityOptions settings={settings} />,
+      description:
+        "Run powerful and internet-connected models hosted by Perplexity AI.",
+    },
     {
       name: "Native",
       value: "native",
diff --git a/server/.env.example b/server/.env.example
index ec6abcac9..863486ad4 100644
--- a/server/.env.example
+++ b/server/.env.example
@@ -41,6 +41,10 @@ JWT_SECRET="my-random-string-for-seeding" # Please generate random string at lea
 # TOGETHER_AI_API_KEY='my-together-ai-key'
 # TOGETHER_AI_MODEL_PREF='mistralai/Mixtral-8x7B-Instruct-v0.1'
 
+# LLM_PROVIDER='perplexity'
+# PERPLEXITY_API_KEY='my-perplexity-key'
+# PERPLEXITY_MODEL_PREF='codellama-34b-instruct'
+
 # LLM_PROVIDER='mistral'
 # MISTRAL_API_KEY='example-mistral-ai-api-key'
 # MISTRAL_MODEL_PREF='mistral-tiny'
diff --git a/server/models/systemSettings.js b/server/models/systemSettings.js
index 29949d3d7..415448282 100644
--- a/server/models/systemSettings.js
+++ b/server/models/systemSettings.js
@@ -176,6 +176,18 @@ const SystemSettings = {
             TogetherAiApiKey: !!process.env.TOGETHER_AI_API_KEY,
             TogetherAiModelPref: process.env.TOGETHER_AI_MODEL_PREF,
 
+            // For embedding credentials when TogetherAI is selected.
+            OpenAiKey: !!process.env.OPEN_AI_KEY,
+            AzureOpenAiEndpoint: process.env.AZURE_OPENAI_ENDPOINT,
+            AzureOpenAiKey: !!process.env.AZURE_OPENAI_KEY,
+            AzureOpenAiEmbeddingModelPref: process.env.EMBEDDING_MODEL_PREF,
+          }
+        : {}),
+      ...(llmProvider === "perplexity"
+        ? {
+            PerplexityApiKey: !!process.env.PERPLEXITY_API_KEY,
+            PerplexityModelPref: process.env.PERPLEXITY_MODEL_PREF,
+
+            // For embedding credentials when Perplexity is selected.
             OpenAiKey: !!process.env.OPEN_AI_KEY,
             AzureOpenAiEndpoint: process.env.AZURE_OPENAI_ENDPOINT,
diff --git a/server/utils/AiProviders/perplexity/index.js b/server/utils/AiProviders/perplexity/index.js
new file mode 100644
index 000000000..df20df203
--- /dev/null
+++ b/server/utils/AiProviders/perplexity/index.js
@@ -0,0 +1,204 @@
+const { NativeEmbedder } = require("../../EmbeddingEngines/native");
+const { chatPrompt } = require("../../chats");
+const { handleDefaultStreamResponse } = require("../../helpers/chat/responses");
+
+function perplexityModels() {
+  const { MODELS } = require("./models.js");
+  return MODELS || {};
+}
+
+class PerplexityLLM {
+  constructor(embedder = null, modelPreference = null) {
+    const { Configuration, OpenAIApi } = require("openai");
+    if (!process.env.PERPLEXITY_API_KEY)
+      throw new Error("No Perplexity API key was set.");
+
+    const config = new Configuration({
+      basePath: "https://api.perplexity.ai",
+      apiKey: process.env.PERPLEXITY_API_KEY,
+    });
+    this.openai = new OpenAIApi(config);
+    this.model =
+      modelPreference || process.env.PERPLEXITY_MODEL_PREF || "pplx-7b-online"; // Give at least a unique model to the provider as last fallback.
+    this.limits = {
+      history: this.promptWindowLimit() * 0.15,
+      system: this.promptWindowLimit() * 0.15,
+      user: this.promptWindowLimit() * 0.7,
+    };
+
+    this.embedder = !embedder ? new NativeEmbedder() : embedder;
+    this.defaultTemp = 0.7;
+  }
+
+  #appendContext(contextTexts = []) {
+    if (!contextTexts || !contextTexts.length) return "";
+    return (
+      "\nContext:\n" +
+      contextTexts
+        .map((text, i) => {
+          return `[CONTEXT ${i}]:\n${text}\n[END CONTEXT ${i}]\n\n`;
+        })
+        .join("")
+    );
+  }
+
+  allModelInformation() {
+    return perplexityModels();
+  }
+
+  streamingEnabled() {
+    return "streamChat" in this && "streamGetChatCompletion" in this;
+  }
+
+  promptWindowLimit() {
+    const availableModels = this.allModelInformation();
+    return availableModels[this.model]?.maxLength || 4096;
+  }
+
+  async isValidChatCompletionModel(model = "") {
+    const availableModels = this.allModelInformation();
+    return availableModels.hasOwnProperty(model);
+  }
+
+  constructPrompt({
+    systemPrompt = "",
+    contextTexts = [],
+    chatHistory = [],
+    userPrompt = "",
+  }) {
+    const prompt = {
+      role: "system",
+      content: `${systemPrompt}${this.#appendContext(contextTexts)}`,
+    };
+    return [prompt, ...chatHistory, { role: "user", content: userPrompt }];
+  }
+
+  async isSafe(_input = "") {
+    // Not implemented so must be stubbed
+    return { safe: true, reasons: [] };
+  }
+
+  async sendChat(chatHistory = [], prompt, workspace = {}, rawHistory = []) {
+    if (!(await this.isValidChatCompletionModel(this.model)))
+      throw new Error(
+        `Perplexity chat: ${this.model} is not valid for chat completion!`
+      );
+
+    const textResponse = await this.openai
+      .createChatCompletion({
+        model: this.model,
+        temperature: Number(workspace?.openAiTemp ?? this.defaultTemp),
+        n: 1,
+        messages: await this.compressMessages(
+          {
+            systemPrompt: chatPrompt(workspace),
+            userPrompt: prompt,
+            chatHistory,
+          },
+          rawHistory
+        ),
+      })
+      .then((json) => {
+        const res = json.data;
+        if (!res.hasOwnProperty("choices"))
+          throw new Error("Perplexity chat: No results!");
+        if (res.choices.length === 0)
+          throw new Error("Perplexity chat: No results length!");
+        return res.choices[0].message.content;
+      })
+      .catch((error) => {
+        throw new Error(
+          `Perplexity::createChatCompletion failed with: ${error.message}`
+        );
+      });
+
+    return textResponse;
+  }
+
+  async streamChat(chatHistory = [], prompt, workspace = {}, rawHistory = []) {
+    if (!(await this.isValidChatCompletionModel(this.model)))
+      throw new Error(
+        `Perplexity chat: ${this.model} is not valid for chat completion!`
+      );
+
+    const streamRequest = await this.openai.createChatCompletion(
+      {
+        model: this.model,
+        stream: true,
+        temperature: Number(workspace?.openAiTemp ?? this.defaultTemp),
+        n: 1,
+        messages: await this.compressMessages(
+          {
+            systemPrompt: chatPrompt(workspace),
+            userPrompt: prompt,
+            chatHistory,
+          },
+          rawHistory
+        ),
+      },
+      { responseType: "stream" }
+    );
+    return streamRequest;
+  }
+
+  async getChatCompletion(messages = null, { temperature = 0.7 }) {
+    if (!(await this.isValidChatCompletionModel(this.model)))
+      throw new Error(
+        `Perplexity chat: ${this.model} is not valid for chat completion!`
+      );
+
+    const { data } = await this.openai
+      .createChatCompletion({
+        model: this.model,
+        messages,
+        temperature,
+      })
+      .catch((e) => {
+        throw new Error(e.response.data.error.message);
+      });
+
+    if (!data.hasOwnProperty("choices")) return null;
+    return data.choices[0].message.content;
+  }
+
+  async streamGetChatCompletion(messages = null, { temperature = 0.7 }) {
+    if (!(await this.isValidChatCompletionModel(this.model)))
+      throw new Error(
+        `Perplexity chat: ${this.model} is not valid for chat completion!`
+      );
+
+    const streamRequest = await this.openai.createChatCompletion(
+      {
+        model: this.model,
+        stream: true,
+        messages,
+        temperature,
+      },
+      { responseType: "stream" }
+    );
+    return streamRequest;
+  }
+
+  handleStream(response, stream, responseProps) {
+    return handleDefaultStreamResponse(response, stream, responseProps);
+  }
+
+  // Simple wrapper for dynamic embedder & normalize interface for all LLM implementations
+  async embedTextInput(textInput) {
+    return await this.embedder.embedTextInput(textInput);
+  }
+  async embedChunks(textChunks = []) {
+    return await this.embedder.embedChunks(textChunks);
+  }
+
+  async compressMessages(promptArgs = {}, rawHistory = []) {
+    const { messageArrayCompressor } = require("../../helpers/chat");
+    const messageArray = this.constructPrompt(promptArgs);
+    return await messageArrayCompressor(this, messageArray, rawHistory);
+  }
+}
+
+module.exports = {
+  PerplexityLLM,
+  perplexityModels,
+};
diff --git a/server/utils/AiProviders/perplexity/models.js b/server/utils/AiProviders/perplexity/models.js
new file mode 100644
index 000000000..258cfeace
--- /dev/null
+++ b/server/utils/AiProviders/perplexity/models.js
@@ -0,0 +1,49 @@
+const MODELS = {
+  "codellama-34b-instruct": {
+    id: "codellama-34b-instruct",
+    name: "codellama-34b-instruct",
+    maxLength: 16384,
+  },
+  "codellama-70b-instruct": {
+    id: "codellama-70b-instruct",
+    name: "codellama-70b-instruct",
+    maxLength: 16384,
+  },
+  "llama-2-70b-chat": {
+    id: "llama-2-70b-chat",
+    name: "llama-2-70b-chat",
+    maxLength: 4096,
+  },
+  "mistral-7b-instruct": {
+    id: "mistral-7b-instruct",
+    name: "mistral-7b-instruct",
+    maxLength: 8192,
+  },
+  "mixtral-8x7b-instruct": {
+    id: "mixtral-8x7b-instruct",
+    name: "mixtral-8x7b-instruct",
+    maxLength: 8192,
+  },
+  "pplx-7b-chat": {
+    id: "pplx-7b-chat",
+    name: "pplx-7b-chat",
+    maxLength: 8192,
+  },
+  "pplx-70b-chat": {
+    id: "pplx-70b-chat",
+    name: "pplx-70b-chat",
+    maxLength: 8192,
+  },
+  "pplx-7b-online": {
+    id: "pplx-7b-online",
+    name: "pplx-7b-online",
+    maxLength: 8192,
+  },
+  "pplx-70b-online": {
+    id: "pplx-70b-online",
+    name: "pplx-70b-online",
+    maxLength: 8192,
+  },
+};
+
+module.exports.MODELS = MODELS;
diff --git a/server/utils/AiProviders/perplexity/scripts/.gitignore b/server/utils/AiProviders/perplexity/scripts/.gitignore
new file mode 100644
index 000000000..94a2dd146
--- /dev/null
+++ b/server/utils/AiProviders/perplexity/scripts/.gitignore
@@ -0,0 +1 @@
+*.json
\ No newline at end of file
diff --git a/server/utils/AiProviders/perplexity/scripts/chat_models.txt b/server/utils/AiProviders/perplexity/scripts/chat_models.txt
new file mode 100644
index 000000000..83f6d2a80
--- /dev/null
+++ b/server/utils/AiProviders/perplexity/scripts/chat_models.txt
@@ -0,0 +1,11 @@
+| Model                     | Context Length | Model Type      |
+| :------------------------ | :------------- | :-------------- |
+| `codellama-34b-instruct`  | 16384          | Chat Completion |
+| `codellama-70b-instruct`  | 16384          | Chat Completion |
+| `llama-2-70b-chat`        | 4096           | Chat Completion |
+| `mistral-7b-instruct` [2] | 8192 [1]       | Chat Completion |
+| `mixtral-8x7b-instruct`   | 8192 [1]       | Chat Completion |
+| `pplx-7b-chat`            | 8192           | Chat Completion |
+| `pplx-70b-chat`           | 8192           | Chat Completion |
+| `pplx-7b-online`          | 8192           | Chat Completion |
+| `pplx-70b-online`         | 8192           | Chat Completion |
\ No newline at end of file
diff --git a/server/utils/AiProviders/perplexity/scripts/parse.mjs b/server/utils/AiProviders/perplexity/scripts/parse.mjs
new file mode 100644
index 000000000..749a63dce
--- /dev/null
+++ b/server/utils/AiProviders/perplexity/scripts/parse.mjs
@@ -0,0 +1,44 @@
+// Perplexity does not provide a simple REST API to get models,
+// so we have a table which we copy from their documentation
+// https://docs.perplexity.ai/edit/model-cards that we can
+// then parse and get all models from in a format that makes sense
+// Why this does not exist is so bizarre, but whatever.
+
+// To run, cd into this directory and run `node parse.mjs`
+// copy outputs into the export in ../models.js
+
+// Update the date below if you run this again because Perplexity added new models.
+// Last Collected: Feb 22, 2024
+
+import fs from "fs";
+
+function parseChatModels() {
+  const models = {};
+  const tableString = fs.readFileSync("chat_models.txt", { encoding: "utf-8" });
+  const rows = tableString.split("\n").slice(2);
+
+  rows.forEach((row) => {
+    const cells = row.split("|").slice(1, -1).map((text) => text.trim());
+    if (cells.length < 2) return; // skip blank or malformed table rows
+    let [model, contextLength] = cells;
+    model = model.replace(/`|\s*\[\d+\]\s*/g, "");
+    const maxLength = Number(contextLength.replace(/\s*\[\d+\]\s*/g, ""));
+    if (model && maxLength) {
+    if (model && maxLength) {
+      models[model] = {
+        id: model,
+        name: model,
+        maxLength: maxLength,
+      };
+    }
+  });
+
+  fs.writeFileSync(
+    "chat_models.json",
+    JSON.stringify(models, null, 2),
+    "utf-8"
+  );
+  return models;
+}
+
+parseChatModels();
diff --git a/server/utils/helpers/customModels.js b/server/utils/helpers/customModels.js
index 53c641e75..8f8ca0657 100644
--- a/server/utils/helpers/customModels.js
+++ b/server/utils/helpers/customModels.js
@@ -1,3 +1,4 @@
+const { perplexityModels } = require("../AiProviders/perplexity");
 const { togetherAiModels } = require("../AiProviders/togetherAi");
 const SUPPORT_CUSTOM_MODELS = [
   "openai",
@@ -6,6 +7,7 @@ const SUPPORT_CUSTOM_MODELS = [
   "native-llm",
   "togetherai",
   "mistral",
+  "perplexity",
 ];
 
 async function getCustomModels(provider = "", apiKey = null, basePath = null) {
@@ -25,6 +27,8 @@ async function getCustomModels(provider = "", apiKey = null, basePath = null) {
       return await getMistralModels(apiKey);
     case "native-llm":
       return nativeLLMModels();
+    case "perplexity":
+      return await getPerplexityModels();
     default:
       return { models: [], error: "Invalid provider for custom models" };
   }
@@ -120,6 +124,20 @@ async function getTogetherAiModels() {
   return { models, error: null };
 }
 
+async function getPerplexityModels() {
+  const knownModels = perplexityModels();
+  if (Object.keys(knownModels).length === 0)
+    return { models: [], error: null };
+
+  const models = Object.values(knownModels).map((model) => {
+    return {
+      id: model.id,
+      name: model.name,
+    };
+  });
+  return { models, error: null };
+}
+
 async function getMistralModels(apiKey = null) {
   const { Configuration, OpenAIApi } = require("openai");
   const config = new Configuration({
diff --git a/server/utils/helpers/index.js b/server/utils/helpers/index.js
index 42ed262f9..818d92dbc 100644
--- a/server/utils/helpers/index.js
+++ b/server/utils/helpers/index.js
@@ -58,6 +58,9 @@ function getLLMProvider(modelPreference = null) {
     case "togetherai":
       const { TogetherAiLLM } = require("../AiProviders/togetherAi");
       return new TogetherAiLLM(embedder, modelPreference);
+    case "perplexity":
+      const { PerplexityLLM } = require("../AiProviders/perplexity");
+      return new PerplexityLLM(embedder, modelPreference);
     case "mistral":
       const { MistralLLM } = require("../AiProviders/mistral");
       return new MistralLLM(embedder, modelPreference);
diff --git a/server/utils/helpers/updateENV.js b/server/utils/helpers/updateENV.js
index f89a193f6..5a384740b 100644
--- a/server/utils/helpers/updateENV.js
+++ b/server/utils/helpers/updateENV.js
@@ -239,6 +239,16 @@ const KEY_MAPPING = {
     checks: [isNotEmpty],
   },
 
+  // Perplexity Options
+  PerplexityApiKey: {
+    envKey: "PERPLEXITY_API_KEY",
+    checks: [isNotEmpty],
+  },
+  PerplexityModelPref: {
+    envKey: "PERPLEXITY_MODEL_PREF",
+    checks: [isNotEmpty],
+  },
+
   // System Settings
   AuthToken: {
     envKey: "AUTH_TOKEN",
@@ -314,6 +324,7 @@ function supportedLLM(input = "") {
     "togetherai",
     "mistral",
     "huggingface",
+    "perplexity",
   ].includes(input);
   return validSelection ? null : `${input} is not a valid LLM provider.`;
 }
-- 
GitLab